Detailed changes
@@ -1,4 +1,4 @@
-# yaml-language-server: $schema=https://json.schemastore.org/github-issue-config.json
+# yaml-language-server: $schema=https://www.schemastore.org/github-issue-config.json
blank_issues_enabled: false
contact_links:
- name: Feature Request
@@ -35,6 +35,9 @@ jobs:
- name: steps::install_mold
run: ./script/install-mold
shell: bash -euxo pipefail {0}
+ - name: steps::download_wasi_sdk
+ run: ./script/download-wasi-sdk
+ shell: bash -euxo pipefail {0}
- name: compare_perf::run_perf::install_hyperfine
run: cargo install hyperfine
shell: bash -euxo pipefail {0}
@@ -57,16 +57,19 @@ jobs:
mkdir -p ./../.cargo
cp ./.cargo/ci-config.toml ./../.cargo/config.toml
shell: bash -euxo pipefail {0}
+ - name: steps::cache_rust_dependencies_namespace
+ uses: namespacelabs/nscloud-cache-action@v1
+ with:
+ cache: rust
- name: steps::setup_linux
run: ./script/linux
shell: bash -euxo pipefail {0}
- name: steps::install_mold
run: ./script/install-mold
shell: bash -euxo pipefail {0}
- - name: steps::cache_rust_dependencies_namespace
- uses: namespacelabs/nscloud-cache-action@v1
- with:
- cache: rust
+ - name: steps::download_wasi_sdk
+ run: ./script/download-wasi-sdk
+ shell: bash -euxo pipefail {0}
- name: steps::setup_node
uses: actions/setup-node@49933ea5288caeca8642d1e84afbd3f7d6820020
with:
@@ -202,6 +205,9 @@ jobs:
- name: steps::install_mold
run: ./script/install-mold
shell: bash -euxo pipefail {0}
+ - name: steps::download_wasi_sdk
+ run: ./script/download-wasi-sdk
+ shell: bash -euxo pipefail {0}
- name: ./script/bundle-linux
run: ./script/bundle-linux
shell: bash -euxo pipefail {0}
@@ -242,6 +248,9 @@ jobs:
- name: steps::install_mold
run: ./script/install-mold
shell: bash -euxo pipefail {0}
+ - name: steps::download_wasi_sdk
+ run: ./script/download-wasi-sdk
+ shell: bash -euxo pipefail {0}
- name: ./script/bundle-linux
run: ./script/bundle-linux
shell: bash -euxo pipefail {0}
@@ -93,6 +93,9 @@ jobs:
- name: steps::install_mold
run: ./script/install-mold
shell: bash -euxo pipefail {0}
+ - name: steps::download_wasi_sdk
+ run: ./script/download-wasi-sdk
+ shell: bash -euxo pipefail {0}
- name: ./script/bundle-linux
run: ./script/bundle-linux
shell: bash -euxo pipefail {0}
@@ -140,6 +143,9 @@ jobs:
- name: steps::install_mold
run: ./script/install-mold
shell: bash -euxo pipefail {0}
+ - name: steps::download_wasi_sdk
+ run: ./script/download-wasi-sdk
+ shell: bash -euxo pipefail {0}
- name: ./script/bundle-linux
run: ./script/bundle-linux
shell: bash -euxo pipefail {0}
@@ -6,24 +6,21 @@ env:
CARGO_INCREMENTAL: '0'
RUST_BACKTRACE: '1'
ANTHROPIC_API_KEY: ${{ secrets.ANTHROPIC_API_KEY }}
+ OPENAI_API_KEY: ${{ secrets.OPENAI_API_KEY }}
+ GOOGLE_AI_API_KEY: ${{ secrets.GOOGLE_AI_API_KEY }}
+ GOOGLE_CLOUD_PROJECT: ${{ secrets.GOOGLE_CLOUD_PROJECT }}
ZED_CLIENT_CHECKSUM_SEED: ${{ secrets.ZED_CLIENT_CHECKSUM_SEED }}
ZED_EVAL_TELEMETRY: '1'
+ MODEL_NAME: ${{ inputs.model_name }}
on:
- pull_request:
- types:
- - synchronize
- - reopened
- - labeled
- branches:
- - '**'
- schedule:
- - cron: 0 0 * * *
- workflow_dispatch: {}
+ workflow_dispatch:
+ inputs:
+ model_name:
+ description: model_name
+ required: true
+ type: string
jobs:
agent_evals:
- if: |
- github.repository_owner == 'zed-industries' &&
- (github.event_name != 'pull_request' || contains(github.event.pull_request.labels.*.name, 'run-eval'))
runs-on: namespace-profile-16x32-ubuntu-2204
steps:
- name: steps::checkout_repo
@@ -40,6 +37,9 @@ jobs:
- name: steps::install_mold
run: ./script/install-mold
shell: bash -euxo pipefail {0}
+ - name: steps::download_wasi_sdk
+ run: ./script/download-wasi-sdk
+ shell: bash -euxo pipefail {0}
- name: steps::setup_cargo_config
run: |
mkdir -p ./../.cargo
@@ -49,14 +49,19 @@ jobs:
run: cargo build --package=eval
shell: bash -euxo pipefail {0}
- name: run_agent_evals::agent_evals::run_eval
- run: cargo run --package=eval -- --repetitions=8 --concurrency=1
+ run: cargo run --package=eval -- --repetitions=8 --concurrency=1 --model "${MODEL_NAME}"
shell: bash -euxo pipefail {0}
+ env:
+ ANTHROPIC_API_KEY: ${{ secrets.ANTHROPIC_API_KEY }}
+ OPENAI_API_KEY: ${{ secrets.OPENAI_API_KEY }}
+ GOOGLE_AI_API_KEY: ${{ secrets.GOOGLE_AI_API_KEY }}
+ GOOGLE_CLOUD_PROJECT: ${{ secrets.GOOGLE_CLOUD_PROJECT }}
- name: steps::cleanup_cargo_config
if: always()
run: |
rm -rf ./../.cargo
shell: bash -euxo pipefail {0}
- timeout-minutes: 60
+ timeout-minutes: 600
concurrency:
group: ${{ github.workflow }}-${{ github.ref_name }}-${{ github.ref_name == 'main' && github.sha || 'anysha' }}
cancel-in-progress: true
@@ -34,6 +34,9 @@ jobs:
- name: steps::install_mold
run: ./script/install-mold
shell: bash -euxo pipefail {0}
+ - name: steps::download_wasi_sdk
+ run: ./script/download-wasi-sdk
+ shell: bash -euxo pipefail {0}
- name: ./script/bundle-linux
run: ./script/bundle-linux
shell: bash -euxo pipefail {0}
@@ -74,6 +77,9 @@ jobs:
- name: steps::install_mold
run: ./script/install-mold
shell: bash -euxo pipefail {0}
+ - name: steps::download_wasi_sdk
+ run: ./script/download-wasi-sdk
+ shell: bash -euxo pipefail {0}
- name: ./script/bundle-linux
run: ./script/bundle-linux
shell: bash -euxo pipefail {0}
@@ -0,0 +1,78 @@
+# Generated from xtask::workflows::run_cron_unit_evals
+# Rebuild with `cargo xtask workflows`.
+name: run_cron_unit_evals
+env:
+ CARGO_TERM_COLOR: always
+ CARGO_INCREMENTAL: '0'
+ RUST_BACKTRACE: '1'
+ ZED_CLIENT_CHECKSUM_SEED: ${{ secrets.ZED_CLIENT_CHECKSUM_SEED }}
+on:
+ schedule:
+ - cron: 47 1 * * 2
+ workflow_dispatch: {}
+jobs:
+ cron_unit_evals:
+ runs-on: namespace-profile-16x32-ubuntu-2204
+ steps:
+ - name: steps::checkout_repo
+ uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683
+ with:
+ clean: false
+ - name: steps::setup_cargo_config
+ run: |
+ mkdir -p ./../.cargo
+ cp ./.cargo/ci-config.toml ./../.cargo/config.toml
+ shell: bash -euxo pipefail {0}
+ - name: steps::cache_rust_dependencies_namespace
+ uses: namespacelabs/nscloud-cache-action@v1
+ with:
+ cache: rust
+ - name: steps::setup_linux
+ run: ./script/linux
+ shell: bash -euxo pipefail {0}
+ - name: steps::install_mold
+ run: ./script/install-mold
+ shell: bash -euxo pipefail {0}
+ - name: steps::download_wasi_sdk
+ run: ./script/download-wasi-sdk
+ shell: bash -euxo pipefail {0}
+ - name: steps::cargo_install_nextest
+ run: cargo install cargo-nextest --locked
+ shell: bash -euxo pipefail {0}
+ - name: steps::clear_target_dir_if_large
+ run: ./script/clear-target-dir-if-larger-than 250
+ shell: bash -euxo pipefail {0}
+ - name: ./script/run-unit-evals
+ run: ./script/run-unit-evals
+ shell: bash -euxo pipefail {0}
+ env:
+ ANTHROPIC_API_KEY: ${{ secrets.ANTHROPIC_API_KEY }}
+ OPENAI_API_KEY: ${{ secrets.OPENAI_API_KEY }}
+ GOOGLE_AI_API_KEY: ${{ secrets.GOOGLE_AI_API_KEY }}
+ GOOGLE_CLOUD_PROJECT: ${{ secrets.GOOGLE_CLOUD_PROJECT }}
+ - name: run_agent_evals::unit_evals::send_failure_to_slack
+ if: ${{ failure() }}
+ uses: slackapi/slack-github-action@b0fa283ad8fea605de13dc3f449259339835fc52
+ with:
+ method: chat.postMessage
+ token: ${{ secrets.SLACK_APP_ZED_UNIT_EVALS_BOT_TOKEN }}
+ payload: |
+ channel: C04UDRNNJFQ
+ text: "Unit Evals Failed: https://github.com/zed-industries/zed/actions/runs/${{ github.run_id }}"
+ - name: steps::cleanup_cargo_config
+ if: always()
+ run: |
+ rm -rf ./../.cargo
+ shell: bash -euxo pipefail {0}
+ - name: run_agent_evals::cron_unit_evals::send_failure_to_slack
+ if: ${{ failure() }}
+ uses: slackapi/slack-github-action@b0fa283ad8fea605de13dc3f449259339835fc52
+ with:
+ method: chat.postMessage
+ token: ${{ secrets.SLACK_APP_ZED_UNIT_EVALS_BOT_TOKEN }}
+ payload: |
+ channel: C04UDRNNJFQ
+ text: "Unit Evals Failed: https://github.com/zed-industries/zed/actions/runs/${{ github.run_id }}"
+concurrency:
+ group: ${{ github.workflow }}-${{ github.ref_name }}-${{ github.ref_name == 'main' && github.sha || 'anysha' }}
+ cancel-in-progress: true
@@ -143,16 +143,19 @@ jobs:
mkdir -p ./../.cargo
cp ./.cargo/ci-config.toml ./../.cargo/config.toml
shell: bash -euxo pipefail {0}
+ - name: steps::cache_rust_dependencies_namespace
+ uses: namespacelabs/nscloud-cache-action@v1
+ with:
+ cache: rust
- name: steps::setup_linux
run: ./script/linux
shell: bash -euxo pipefail {0}
- name: steps::install_mold
run: ./script/install-mold
shell: bash -euxo pipefail {0}
- - name: steps::cache_rust_dependencies_namespace
- uses: namespacelabs/nscloud-cache-action@v1
- with:
- cache: rust
+ - name: steps::download_wasi_sdk
+ run: ./script/download-wasi-sdk
+ shell: bash -euxo pipefail {0}
- name: steps::setup_node
uses: actions/setup-node@49933ea5288caeca8642d1e84afbd3f7d6820020
with:
@@ -232,6 +235,9 @@ jobs:
- name: steps::install_mold
run: ./script/install-mold
shell: bash -euxo pipefail {0}
+ - name: steps::download_wasi_sdk
+ run: ./script/download-wasi-sdk
+ shell: bash -euxo pipefail {0}
- name: steps::setup_cargo_config
run: |
mkdir -p ./../.cargo
@@ -263,16 +269,19 @@ jobs:
mkdir -p ./../.cargo
cp ./.cargo/ci-config.toml ./../.cargo/config.toml
shell: bash -euxo pipefail {0}
+ - name: steps::cache_rust_dependencies_namespace
+ uses: namespacelabs/nscloud-cache-action@v1
+ with:
+ cache: rust
- name: steps::setup_linux
run: ./script/linux
shell: bash -euxo pipefail {0}
- name: steps::install_mold
run: ./script/install-mold
shell: bash -euxo pipefail {0}
- - name: steps::cache_rust_dependencies_namespace
- uses: namespacelabs/nscloud-cache-action@v1
- with:
- cache: rust
+ - name: steps::download_wasi_sdk
+ run: ./script/download-wasi-sdk
+ shell: bash -euxo pipefail {0}
- name: cargo build -p collab
run: cargo build -p collab
shell: bash -euxo pipefail {0}
@@ -348,6 +357,9 @@ jobs:
- name: steps::install_mold
run: ./script/install-mold
shell: bash -euxo pipefail {0}
+ - name: steps::download_wasi_sdk
+ run: ./script/download-wasi-sdk
+ shell: bash -euxo pipefail {0}
- name: run_tests::check_docs::install_mdbook
uses: peaceiris/actions-mdbook@ee69d230fe19748b7abf22df32acaa93833fad08
with:
@@ -1,17 +1,26 @@
-# Generated from xtask::workflows::run_agent_evals
+# Generated from xtask::workflows::run_unit_evals
# Rebuild with `cargo xtask workflows`.
-name: run_agent_evals
+name: run_unit_evals
env:
CARGO_TERM_COLOR: always
CARGO_INCREMENTAL: '0'
RUST_BACKTRACE: '1'
ZED_CLIENT_CHECKSUM_SEED: ${{ secrets.ZED_CLIENT_CHECKSUM_SEED }}
+ ZED_EVAL_TELEMETRY: '1'
+ MODEL_NAME: ${{ inputs.model_name }}
on:
- schedule:
- - cron: 47 1 * * 2
- workflow_dispatch: {}
+ workflow_dispatch:
+ inputs:
+ model_name:
+ description: model_name
+ required: true
+ type: string
+ commit_sha:
+ description: commit_sha
+ required: true
+ type: string
jobs:
- unit_evals:
+ run_unit_evals:
runs-on: namespace-profile-16x32-ubuntu-2204
steps:
- name: steps::checkout_repo
@@ -33,6 +42,9 @@ jobs:
- name: steps::install_mold
run: ./script/install-mold
shell: bash -euxo pipefail {0}
+ - name: steps::download_wasi_sdk
+ run: ./script/download-wasi-sdk
+ shell: bash -euxo pipefail {0}
- name: steps::cargo_install_nextest
run: cargo install cargo-nextest --locked
shell: bash -euxo pipefail {0}
@@ -44,6 +56,10 @@ jobs:
shell: bash -euxo pipefail {0}
env:
ANTHROPIC_API_KEY: ${{ secrets.ANTHROPIC_API_KEY }}
+ OPENAI_API_KEY: ${{ secrets.OPENAI_API_KEY }}
+ GOOGLE_AI_API_KEY: ${{ secrets.GOOGLE_AI_API_KEY }}
+ GOOGLE_CLOUD_PROJECT: ${{ secrets.GOOGLE_CLOUD_PROJECT }}
+ UNIT_EVAL_COMMIT: ${{ inputs.commit_sha }}
- name: run_agent_evals::unit_evals::send_failure_to_slack
if: ${{ failure() }}
uses: slackapi/slack-github-action@b0fa283ad8fea605de13dc3f449259339835fc52
@@ -96,6 +96,7 @@ dependencies = [
"auto_update",
"editor",
"extension_host",
+ "fs",
"futures 0.3.31",
"gpui",
"language",
@@ -1330,10 +1331,14 @@ version = "0.1.0"
dependencies = [
"anyhow",
"client",
+ "clock",
+ "ctor",
"db",
+ "futures 0.3.31",
"gpui",
"http_client",
"log",
+ "parking_lot",
"paths",
"release_channel",
"serde",
@@ -1344,6 +1349,7 @@ dependencies = [
"util",
"which 6.0.3",
"workspace",
+ "zlog",
]
[[package]]
@@ -7799,6 +7805,7 @@ dependencies = [
"parking_lot",
"serde",
"serde_json",
+ "serde_urlencoded",
"sha2",
"tempfile",
"url",
@@ -1,7 +1,7 @@
# Zed
[](https://zed.dev)
-[](https://github.com/zed-industries/zed/actions/workflows/ci.yml)
+[](https://github.com/zed-industries/zed/actions/workflows/run_tests.yml)
Welcome to Zed, a high-performance, multiplayer code editor from the creators of [Atom](https://github.com/atom/atom) and [Tree-sitter](https://github.com/tree-sitter/tree-sitter).
@@ -0,0 +1,4 @@
+<svg width="16" height="16" viewBox="0 0 16 16" fill="none" xmlns="http://www.w3.org/2000/svg">
+<path d="M8.00156 10.3996C9.32705 10.3996 10.4016 9.32509 10.4016 7.99961C10.4016 6.67413 9.32705 5.59961 8.00156 5.59961C6.67608 5.59961 5.60156 6.67413 5.60156 7.99961C5.60156 9.32509 6.67608 10.3996 8.00156 10.3996Z" stroke="black" stroke-width="1.2" stroke-linecap="round" stroke-linejoin="round"/>
+<path d="M10.4 5.6V8.6C10.4 9.07739 10.5896 9.53523 10.9272 9.8728C11.2648 10.2104 11.7226 10.4 12.2 10.4C12.6774 10.4 13.1352 10.2104 13.4728 9.8728C13.8104 9.53523 14 9.07739 14 8.6V8C14 6.64839 13.5436 5.33636 12.7048 4.27651C11.8661 3.21665 10.694 2.47105 9.37852 2.16051C8.06306 1.84997 6.68129 1.99269 5.45707 2.56554C4.23285 3.13838 3.23791 4.1078 2.63344 5.31672C2.02898 6.52565 1.85041 7.90325 2.12667 9.22633C2.40292 10.5494 3.11782 11.7405 4.15552 12.6065C5.19323 13.4726 6.49295 13.9629 7.84411 13.998C9.19527 14.0331 10.5187 13.611 11.6 12.8" stroke="black" stroke-width="1.2" stroke-linecap="round" stroke-linejoin="round"/>
+</svg>
@@ -611,6 +611,10 @@
// to both the horizontal and vertical delta values while scrolling. Fast scrolling
// happens when a user holds the alt or option key while scrolling.
"fast_scroll_sensitivity": 4.0,
+ "sticky_scroll": {
+ // Whether to stick scopes to the top of the editor.
+ "enabled": false
+ },
"relative_line_numbers": "disabled",
// If 'search_wrap' is disabled, search result do not wrap around the end of the file.
"search_wrap": true,
@@ -618,9 +622,13 @@
"search": {
// Whether to show the project search button in the status bar.
"button": true,
+ // Whether to only match on whole words.
"whole_word": false,
+ // Whether to match case sensitively.
"case_sensitive": false,
+ // Whether to include gitignored files in search results.
"include_ignored": false,
+ // Whether to interpret the search query as a regular expression.
"regex": false,
// Whether to center the cursor on each search match when navigating.
"center_on_match": false
@@ -17,6 +17,7 @@ anyhow.workspace = true
auto_update.workspace = true
editor.workspace = true
extension_host.workspace = true
+fs.workspace = true
futures.workspace = true
gpui.workspace = true
language.workspace = true
@@ -51,6 +51,7 @@ pub struct ActivityIndicator {
project: Entity<Project>,
auto_updater: Option<Entity<AutoUpdater>>,
context_menu_handle: PopoverMenuHandle<ContextMenu>,
+ fs_jobs: Vec<fs::JobInfo>,
}
#[derive(Debug)]
@@ -99,6 +100,27 @@ impl ActivityIndicator {
})
.detach();
+ let fs = project.read(cx).fs().clone();
+ let mut job_events = fs.subscribe_to_jobs();
+ cx.spawn(async move |this, cx| {
+ while let Some(job_event) = job_events.next().await {
+ this.update(cx, |this: &mut ActivityIndicator, cx| {
+ match job_event {
+ fs::JobEvent::Started { info } => {
+ this.fs_jobs.retain(|j| j.id != info.id);
+ this.fs_jobs.push(info);
+ }
+ fs::JobEvent::Completed { id } => {
+ this.fs_jobs.retain(|j| j.id != id);
+ }
+ }
+ cx.notify();
+ })?;
+ }
+ anyhow::Ok(())
+ })
+ .detach();
+
cx.subscribe(
&project.read(cx).lsp_store(),
|activity_indicator, _, event, cx| {
@@ -201,7 +223,8 @@ impl ActivityIndicator {
statuses: Vec::new(),
project: project.clone(),
auto_updater,
- context_menu_handle: Default::default(),
+ context_menu_handle: PopoverMenuHandle::default(),
+ fs_jobs: Vec::new(),
}
});
@@ -432,6 +455,23 @@ impl ActivityIndicator {
});
}
+ // Show any long-running fs command
+ for fs_job in &self.fs_jobs {
+ if Instant::now().duration_since(fs_job.start) >= GIT_OPERATION_DELAY {
+ return Some(Content {
+ icon: Some(
+ Icon::new(IconName::ArrowCircle)
+ .size(IconSize::Small)
+ .with_rotate_animation(2)
+ .into_any_element(),
+ ),
+ message: fs_job.message.clone().into(),
+ on_click: None,
+ tooltip_message: None,
+ });
+ }
+ }
+
// Show any language server installation info.
let mut downloading = SmallVec::<[_; 3]>::new();
let mut checking_for_update = SmallVec::<[_; 3]>::new();
@@ -933,7 +933,7 @@ async fn test_profiles(cx: &mut TestAppContext) {
// Test that test-1 profile (default) has echo and delay tools
thread
.update(cx, |thread, cx| {
- thread.set_profile(AgentProfileId("test-1".into()));
+ thread.set_profile(AgentProfileId("test-1".into()), cx);
thread.send(UserMessageId::new(), ["test"], cx)
})
.unwrap();
@@ -953,7 +953,7 @@ async fn test_profiles(cx: &mut TestAppContext) {
// Switch to test-2 profile, and verify that it has only the infinite tool.
thread
.update(cx, |thread, cx| {
- thread.set_profile(AgentProfileId("test-2".into()));
+ thread.set_profile(AgentProfileId("test-2".into()), cx);
thread.send(UserMessageId::new(), ["test2"], cx)
})
.unwrap();
@@ -1002,8 +1002,8 @@ async fn test_mcp_tools(cx: &mut TestAppContext) {
)
.await;
cx.run_until_parked();
- thread.update(cx, |thread, _| {
- thread.set_profile(AgentProfileId("test".into()))
+ thread.update(cx, |thread, cx| {
+ thread.set_profile(AgentProfileId("test".into()), cx)
});
let mut mcp_tool_calls = setup_context_server(
@@ -1169,8 +1169,8 @@ async fn test_mcp_tool_truncation(cx: &mut TestAppContext) {
.await;
cx.run_until_parked();
- thread.update(cx, |thread, _| {
- thread.set_profile(AgentProfileId("test".into()));
+ thread.update(cx, |thread, cx| {
+ thread.set_profile(AgentProfileId("test".into()), cx);
thread.add_tool(EchoTool);
thread.add_tool(DelayTool);
thread.add_tool(WordListTool);
@@ -30,16 +30,17 @@ use gpui::{
};
use language_model::{
LanguageModel, LanguageModelCompletionError, LanguageModelCompletionEvent, LanguageModelExt,
- LanguageModelImage, LanguageModelProviderId, LanguageModelRegistry, LanguageModelRequest,
- LanguageModelRequestMessage, LanguageModelRequestTool, LanguageModelToolResult,
- LanguageModelToolResultContent, LanguageModelToolSchemaFormat, LanguageModelToolUse,
- LanguageModelToolUseId, Role, SelectedModel, StopReason, TokenUsage, ZED_CLOUD_PROVIDER_ID,
+ LanguageModelId, LanguageModelImage, LanguageModelProviderId, LanguageModelRegistry,
+ LanguageModelRequest, LanguageModelRequestMessage, LanguageModelRequestTool,
+ LanguageModelToolResult, LanguageModelToolResultContent, LanguageModelToolSchemaFormat,
+ LanguageModelToolUse, LanguageModelToolUseId, Role, SelectedModel, StopReason, TokenUsage,
+ ZED_CLOUD_PROVIDER_ID,
};
use project::Project;
use prompt_store::ProjectContext;
use schemars::{JsonSchema, Schema};
use serde::{Deserialize, Serialize};
-use settings::{Settings, update_settings_file};
+use settings::{LanguageModelSelection, Settings, update_settings_file};
use smol::stream::StreamExt;
use std::{
collections::BTreeMap,
@@ -798,7 +799,8 @@ impl Thread {
let profile_id = db_thread
.profile
.unwrap_or_else(|| AgentSettings::get_global(cx).default_profile.clone());
- let model = LanguageModelRegistry::global(cx).update(cx, |registry, cx| {
+
+ let mut model = LanguageModelRegistry::global(cx).update(cx, |registry, cx| {
db_thread
.model
.and_then(|model| {
@@ -811,6 +813,16 @@ impl Thread {
.or_else(|| registry.default_model())
.map(|model| model.model)
});
+
+ if model.is_none() {
+ model = Self::resolve_profile_model(&profile_id, cx);
+ }
+ if model.is_none() {
+ model = LanguageModelRegistry::global(cx).update(cx, |registry, _cx| {
+ registry.default_model().map(|model| model.model)
+ });
+ }
+
let (prompt_capabilities_tx, prompt_capabilities_rx) =
watch::channel(Self::prompt_capabilities(model.as_deref()));
@@ -1007,8 +1019,17 @@ impl Thread {
&self.profile_id
}
- pub fn set_profile(&mut self, profile_id: AgentProfileId) {
+ pub fn set_profile(&mut self, profile_id: AgentProfileId, cx: &mut Context<Self>) {
+ if self.profile_id == profile_id {
+ return;
+ }
+
self.profile_id = profile_id;
+
+ // Swap to the profile's preferred model when available.
+ if let Some(model) = Self::resolve_profile_model(&self.profile_id, cx) {
+ self.set_model(model, cx);
+ }
}
pub fn cancel(&mut self, cx: &mut Context<Self>) {
@@ -1065,6 +1086,35 @@ impl Thread {
})
}
+ /// Look up the active profile and resolve its preferred model if one is configured.
+ fn resolve_profile_model(
+ profile_id: &AgentProfileId,
+ cx: &mut Context<Self>,
+ ) -> Option<Arc<dyn LanguageModel>> {
+ let selection = AgentSettings::get_global(cx)
+ .profiles
+ .get(profile_id)?
+ .default_model
+ .clone()?;
+ Self::resolve_model_from_selection(&selection, cx)
+ }
+
+ /// Translate a stored model selection into the configured model from the registry.
+ fn resolve_model_from_selection(
+ selection: &LanguageModelSelection,
+ cx: &mut Context<Self>,
+ ) -> Option<Arc<dyn LanguageModel>> {
+ let selected = SelectedModel {
+ provider: LanguageModelProviderId::from(selection.provider.0.clone()),
+ model: LanguageModelId::from(selection.model.clone()),
+ };
+ LanguageModelRegistry::global(cx).update(cx, |registry, cx| {
+ registry
+ .select_model(&selected, cx)
+ .map(|configured| configured.model)
+ })
+ }
+
pub fn resume(
&mut self,
cx: &mut Context<Self>,
@@ -136,7 +136,7 @@ impl AcpConnection {
while let Ok(n) = stderr.read_line(&mut line).await
&& n > 0
{
- log::warn!("agent stderr: {}", &line);
+ log::warn!("agent stderr: {}", line.trim());
line.clear();
}
Ok(())
@@ -50,13 +50,14 @@ impl crate::AgentServer for CustomAgentServer {
fn set_default_mode(&self, mode_id: Option<acp::SessionModeId>, fs: Arc<dyn Fs>, cx: &mut App) {
let name = self.name();
update_settings_file(fs, cx, move |settings, _| {
- settings
+ if let Some(settings) = settings
.agent_servers
.get_or_insert_default()
.custom
.get_mut(&name)
- .unwrap()
- .default_mode = mode_id.map(|m| m.to_string())
+ {
+ settings.default_mode = mode_id.map(|m| m.to_string())
+ }
});
}
@@ -6,8 +6,8 @@ use convert_case::{Case, Casing as _};
use fs::Fs;
use gpui::{App, SharedString};
use settings::{
- AgentProfileContent, ContextServerPresetContent, Settings as _, SettingsContent,
- update_settings_file,
+ AgentProfileContent, ContextServerPresetContent, LanguageModelSelection, Settings as _,
+ SettingsContent, update_settings_file,
};
use util::ResultExt as _;
@@ -53,19 +53,30 @@ impl AgentProfile {
let base_profile =
base_profile_id.and_then(|id| AgentSettings::get_global(cx).profiles.get(&id).cloned());
+ // Copy toggles from the base profile so the new profile starts with familiar defaults.
+ let tools = base_profile
+ .as_ref()
+ .map(|profile| profile.tools.clone())
+ .unwrap_or_default();
+ let enable_all_context_servers = base_profile
+ .as_ref()
+ .map(|profile| profile.enable_all_context_servers)
+ .unwrap_or_default();
+ let context_servers = base_profile
+ .as_ref()
+ .map(|profile| profile.context_servers.clone())
+ .unwrap_or_default();
+ // Preserve the base profile's model preference when cloning into a new profile.
+ let default_model = base_profile
+ .as_ref()
+ .and_then(|profile| profile.default_model.clone());
+
let profile_settings = AgentProfileSettings {
name: name.into(),
- tools: base_profile
- .as_ref()
- .map(|profile| profile.tools.clone())
- .unwrap_or_default(),
- enable_all_context_servers: base_profile
- .as_ref()
- .map(|profile| profile.enable_all_context_servers)
- .unwrap_or_default(),
- context_servers: base_profile
- .map(|profile| profile.context_servers)
- .unwrap_or_default(),
+ tools,
+ enable_all_context_servers,
+ context_servers,
+ default_model,
};
update_settings_file(fs, cx, {
@@ -96,6 +107,8 @@ pub struct AgentProfileSettings {
pub tools: IndexMap<Arc<str>, bool>,
pub enable_all_context_servers: bool,
pub context_servers: IndexMap<Arc<str>, ContextServerPreset>,
+ /// Default language model to apply when this profile becomes active.
+ pub default_model: Option<LanguageModelSelection>,
}
impl AgentProfileSettings {
@@ -144,6 +157,7 @@ impl AgentProfileSettings {
)
})
.collect(),
+ default_model: self.default_model.clone(),
},
);
@@ -153,15 +167,23 @@ impl AgentProfileSettings {
impl From<AgentProfileContent> for AgentProfileSettings {
fn from(content: AgentProfileContent) -> Self {
+ let AgentProfileContent {
+ name,
+ tools,
+ enable_all_context_servers,
+ context_servers,
+ default_model,
+ } = content;
+
Self {
- name: content.name.into(),
- tools: content.tools,
- enable_all_context_servers: content.enable_all_context_servers.unwrap_or_default(),
- context_servers: content
- .context_servers
+ name: name.into(),
+ tools,
+ enable_all_context_servers: enable_all_context_servers.unwrap_or_default(),
+ context_servers: context_servers
.into_iter()
.map(|(server_id, preset)| (server_id, preset.into()))
.collect(),
+ default_model,
}
}
}
@@ -694,14 +694,18 @@ fn build_symbol_label(symbol_name: &str, file_name: &str, line: u32, cx: &App) -
}
fn build_code_label_for_full_path(file_name: &str, directory: Option<&str>, cx: &App) -> CodeLabel {
- let comment_id = cx.theme().syntax().highlight_id("comment").map(HighlightId);
+ let path = cx
+ .theme()
+ .syntax()
+ .highlight_id("variable")
+ .map(HighlightId);
let mut label = CodeLabelBuilder::default();
label.push_str(file_name, None);
label.push_str(" ", None);
if let Some(directory) = directory {
- label.push_str(directory, comment_id);
+ label.push_str(directory, path);
}
label.build()
@@ -15,6 +15,7 @@ use editor::{
EditorEvent, EditorMode, EditorSnapshot, EditorStyle, ExcerptId, FoldPlaceholder, Inlay,
MultiBuffer, ToOffset,
actions::Paste,
+ code_context_menus::CodeContextMenu,
display_map::{Crease, CreaseId, FoldId},
scroll::Autoscroll,
};
@@ -272,6 +273,15 @@ impl MessageEditor {
self.editor.read(cx).is_empty(cx)
}
+ pub fn is_completions_menu_visible(&self, cx: &App) -> bool {
+ self.editor
+ .read(cx)
+ .context_menu()
+ .borrow()
+ .as_ref()
+ .is_some_and(|menu| matches!(menu, CodeContextMenu::Completions(_)) && menu.visible())
+ }
+
pub fn mentions(&self) -> HashSet<MentionUri> {
self.mention_set
.mentions
@@ -836,6 +846,45 @@ impl MessageEditor {
cx.emit(MessageEditorEvent::Send)
}
+ pub fn trigger_completion_menu(&mut self, window: &mut Window, cx: &mut Context<Self>) {
+ let editor = self.editor.clone();
+
+ cx.spawn_in(window, async move |_, cx| {
+ editor
+ .update_in(cx, |editor, window, cx| {
+ let menu_is_open =
+ editor.context_menu().borrow().as_ref().is_some_and(|menu| {
+ matches!(menu, CodeContextMenu::Completions(_)) && menu.visible()
+ });
+
+ let has_at_sign = {
+ let snapshot = editor.display_snapshot(cx);
+ let cursor = editor.selections.newest::<text::Point>(&snapshot).head();
+ let offset = cursor.to_offset(&snapshot);
+ if offset > 0 {
+ snapshot
+ .buffer_snapshot()
+ .reversed_chars_at(offset)
+ .next()
+ .map(|sign| sign == '@')
+ .unwrap_or(false)
+ } else {
+ false
+ }
+ };
+
+ if menu_is_open && has_at_sign {
+ return;
+ }
+
+ editor.insert("@", window, cx);
+ editor.show_completions(&editor::actions::ShowCompletions, window, cx);
+ })
+ .log_err();
+ })
+ .detach();
+ }
+
fn chat(&mut self, _: &Chat, _: &mut Window, cx: &mut Context<Self>) {
self.send(cx);
}
@@ -1195,6 +1244,17 @@ impl MessageEditor {
self.editor.read(cx).text(cx)
}
+ pub fn set_placeholder_text(
+ &mut self,
+ placeholder: &str,
+ window: &mut Window,
+ cx: &mut Context<Self>,
+ ) {
+ self.editor.update(cx, |editor, cx| {
+ editor.set_placeholder_text(placeholder, window, cx);
+ });
+ }
+
#[cfg(test)]
pub fn set_text(&mut self, text: &str, window: &mut Window, cx: &mut Context<Self>) {
self.editor.update(cx, |editor, cx| {
@@ -457,25 +457,23 @@ impl Render for AcpThreadHistory {
.on_action(cx.listener(Self::select_last))
.on_action(cx.listener(Self::confirm))
.on_action(cx.listener(Self::remove_selected_thread))
- .when(!self.history_store.read(cx).is_empty(cx), |parent| {
- parent.child(
- h_flex()
- .h(px(41.)) // Match the toolbar perfectly
- .w_full()
- .py_1()
- .px_2()
- .gap_2()
- .justify_between()
- .border_b_1()
- .border_color(cx.theme().colors().border)
- .child(
- Icon::new(IconName::MagnifyingGlass)
- .color(Color::Muted)
- .size(IconSize::Small),
- )
- .child(self.search_editor.clone()),
- )
- })
+ .child(
+ h_flex()
+ .h(px(41.)) // Match the toolbar perfectly
+ .w_full()
+ .py_1()
+ .px_2()
+ .gap_2()
+ .justify_between()
+ .border_b_1()
+ .border_color(cx.theme().colors().border)
+ .child(
+ Icon::new(IconName::MagnifyingGlass)
+ .color(Color::Muted)
+ .size(IconSize::Small),
+ )
+ .child(self.search_editor.clone()),
+ )
.child({
let view = v_flex()
.id("list-container")
@@ -484,19 +482,15 @@ impl Render for AcpThreadHistory {
.flex_grow();
if self.history_store.read(cx).is_empty(cx) {
- view.justify_center()
- .child(
- h_flex().w_full().justify_center().child(
- Label::new("You don't have any past threads yet.")
- .size(LabelSize::Small),
- ),
- )
- } else if self.search_produced_no_matches() {
- view.justify_center().child(
- h_flex().w_full().justify_center().child(
- Label::new("No threads match your search.").size(LabelSize::Small),
- ),
+ view.justify_center().items_center().child(
+ Label::new("You don't have any past threads yet.")
+ .size(LabelSize::Small)
+ .color(Color::Muted),
)
+ } else if self.search_produced_no_matches() {
+ view.justify_center()
+ .items_center()
+ .child(Label::new("No threads match your search.").size(LabelSize::Small))
} else {
view.child(
uniform_list(
@@ -673,7 +667,7 @@ impl EntryTimeFormat {
timezone,
time_format::TimestampFormat::EnhancedAbsolute,
),
- EntryTimeFormat::TimeOnly => time_format::format_time(timestamp),
+ EntryTimeFormat::TimeOnly => time_format::format_time(timestamp.to_offset(timezone)),
}
}
}
@@ -125,8 +125,9 @@ impl ProfileProvider for Entity<agent::Thread> {
}
fn set_profile(&self, profile_id: AgentProfileId, cx: &mut App) {
- self.update(cx, |thread, _cx| {
- thread.set_profile(profile_id);
+ self.update(cx, |thread, cx| {
+ // Apply the profile and let the thread swap to its default model.
+ thread.set_profile(profile_id, cx);
});
}
@@ -336,19 +337,7 @@ impl AcpThreadView {
let prompt_capabilities = Rc::new(RefCell::new(acp::PromptCapabilities::default()));
let available_commands = Rc::new(RefCell::new(vec![]));
- let placeholder = if agent.name() == "Zed Agent" {
- format!("Message the {} β @ to include context", agent.name())
- } else if agent.name() == "Claude Code"
- || agent.name() == "Codex"
- || !available_commands.borrow().is_empty()
- {
- format!(
- "Message {} β @ to include context, / for commands",
- agent.name()
- )
- } else {
- format!("Message {} β @ to include context", agent.name())
- };
+ let placeholder = placeholder_text(agent.name().as_ref(), false);
let message_editor = cx.new(|cx| {
let mut editor = MessageEditor::new(
@@ -1455,7 +1444,14 @@ impl AcpThreadView {
});
}
+ let has_commands = !available_commands.is_empty();
self.available_commands.replace(available_commands);
+
+ let new_placeholder = placeholder_text(self.agent.name().as_ref(), has_commands);
+
+ self.message_editor.update(cx, |editor, cx| {
+ editor.set_placeholder_text(&new_placeholder, window, cx);
+ });
}
AcpThreadEvent::ModeUpdated(_mode) => {
// The connection keeps track of the mode
@@ -4192,6 +4188,8 @@ impl AcpThreadView {
.justify_between()
.child(
h_flex()
+ .gap_0p5()
+ .child(self.render_add_context_button(cx))
.child(self.render_follow_toggle(cx))
.children(self.render_burn_mode_toggle(cx)),
)
@@ -4506,6 +4504,29 @@ impl AcpThreadView {
}))
}
+ fn render_add_context_button(&self, cx: &mut Context<Self>) -> impl IntoElement {
+ let message_editor = self.message_editor.clone();
+ let menu_visible = message_editor.read(cx).is_completions_menu_visible(cx);
+
+ IconButton::new("add-context", IconName::AtSign)
+ .icon_size(IconSize::Small)
+ .icon_color(Color::Muted)
+ .when(!menu_visible, |this| {
+ this.tooltip(move |_window, cx| {
+ Tooltip::with_meta("Add Context", None, "Or type @ to include context", cx)
+ })
+ })
+ .on_click(cx.listener(move |_this, _, window, cx| {
+ let message_editor_clone = message_editor.clone();
+
+ window.defer(cx, move |window, cx| {
+ message_editor_clone.update(cx, |message_editor, cx| {
+ message_editor.trigger_completion_menu(window, cx);
+ });
+ });
+ }))
+ }
+
fn render_markdown(&self, markdown: Entity<Markdown>, style: MarkdownStyle) -> MarkdownElement {
let workspace = self.workspace.clone();
MarkdownElement::new(markdown, style).on_url_click(move |text, window, cx| {
@@ -5707,6 +5728,19 @@ fn loading_contents_spinner(size: IconSize) -> AnyElement {
.into_any_element()
}
+fn placeholder_text(agent_name: &str, has_commands: bool) -> String {
+ if agent_name == "Zed Agent" {
+ format!("Message the {} β @ to include context", agent_name)
+ } else if has_commands {
+ format!(
+ "Message {} β @ to include context, / for commands",
+ agent_name
+ )
+ } else {
+ format!("Message {} β @ to include context", agent_name)
+ }
+}
+
impl Focusable for AcpThreadView {
fn focus_handle(&self, cx: &App) -> FocusHandle {
match self.thread_state {
@@ -8,6 +8,7 @@ use std::{ops::Range, sync::Arc};
use agent::ContextServerRegistry;
use anyhow::Result;
+use client::zed_urls;
use cloud_llm_client::{Plan, PlanV1, PlanV2};
use collections::HashMap;
use context_server::ContextServerId;
@@ -26,18 +27,20 @@ use language_model::{
use language_models::AllLanguageModelSettings;
use notifications::status_toast::{StatusToast, ToastIcon};
use project::{
- agent_server_store::{AgentServerStore, CLAUDE_CODE_NAME, CODEX_NAME, GEMINI_NAME},
+ agent_server_store::{
+ AgentServerStore, CLAUDE_CODE_NAME, CODEX_NAME, ExternalAgentServerName, GEMINI_NAME,
+ },
context_server_store::{ContextServerConfiguration, ContextServerStatus, ContextServerStore},
};
use settings::{Settings, SettingsStore, update_settings_file};
use ui::{
- Button, ButtonStyle, Chip, CommonAnimationExt, ContextMenu, Disclosure, Divider, DividerColor,
- ElevationIndex, IconName, IconPosition, IconSize, Indicator, LabelSize, PopoverMenu, Switch,
- SwitchColor, Tooltip, WithScrollbar, prelude::*,
+ Button, ButtonStyle, Chip, CommonAnimationExt, ContextMenu, ContextMenuEntry, Disclosure,
+ Divider, DividerColor, ElevationIndex, IconName, IconPosition, IconSize, Indicator, LabelSize,
+ PopoverMenu, Switch, SwitchColor, Tooltip, WithScrollbar, prelude::*,
};
use util::ResultExt as _;
use workspace::{Workspace, create_and_open_local_file};
-use zed_actions::ExtensionCategoryFilter;
+use zed_actions::{ExtensionCategoryFilter, OpenBrowser};
pub(crate) use configure_context_server_modal::ConfigureContextServerModal;
pub(crate) use configure_context_server_tools_modal::ConfigureContextServerToolsModal;
@@ -415,6 +418,7 @@ impl AgentConfiguration {
cx: &mut Context<Self>,
) -> impl IntoElement {
let providers = LanguageModelRegistry::read_global(cx).providers();
+
let popover_menu = PopoverMenu::new("add-provider-popover")
.trigger(
Button::new("add-provider", "Add Provider")
@@ -425,7 +429,6 @@ impl AgentConfiguration {
.icon_color(Color::Muted)
.label_size(LabelSize::Small),
)
- .anchor(gpui::Corner::TopRight)
.menu({
let workspace = self.workspace.clone();
move |window, cx| {
@@ -447,6 +450,11 @@ impl AgentConfiguration {
})
}))
}
+ })
+ .anchor(gpui::Corner::TopRight)
+ .offset(gpui::Point {
+ x: px(0.0),
+ y: px(2.0),
});
v_flex()
@@ -541,7 +549,6 @@ impl AgentConfiguration {
.icon_color(Color::Muted)
.label_size(LabelSize::Small),
)
- .anchor(gpui::Corner::TopRight)
.menu({
move |window, cx| {
Some(ContextMenu::build(window, cx, |menu, _window, _cx| {
@@ -564,6 +571,11 @@ impl AgentConfiguration {
})
}))
}
+ })
+ .anchor(gpui::Corner::TopRight)
+ .offset(gpui::Point {
+ x: px(0.0),
+ y: px(2.0),
});
v_flex()
@@ -638,15 +650,13 @@ impl AgentConfiguration {
let is_running = matches!(server_status, ContextServerStatus::Running);
let item_id = SharedString::from(context_server_id.0.clone());
- let is_from_extension = server_configuration
- .as_ref()
- .map(|config| {
- matches!(
- config.as_ref(),
- ContextServerConfiguration::Extension { .. }
- )
- })
- .unwrap_or(false);
+ // Servers without a configuration can only be provided by extensions.
+ let provided_by_extension = server_configuration.is_none_or(|config| {
+ matches!(
+ config.as_ref(),
+ ContextServerConfiguration::Extension { .. }
+ )
+ });
let error = if let ContextServerStatus::Error(error) = server_status.clone() {
Some(error)
@@ -660,7 +670,7 @@ impl AgentConfiguration {
.tools_for_server(&context_server_id)
.count();
- let (source_icon, source_tooltip) = if is_from_extension {
+ let (source_icon, source_tooltip) = if provided_by_extension {
(
IconName::ZedSrcExtension,
"This MCP server was installed from an extension.",
@@ -710,7 +720,6 @@ impl AgentConfiguration {
let fs = self.fs.clone();
let context_server_id = context_server_id.clone();
let language_registry = self.language_registry.clone();
- let context_server_store = self.context_server_store.clone();
let workspace = self.workspace.clone();
let context_server_registry = self.context_server_registry.clone();
@@ -752,23 +761,10 @@ impl AgentConfiguration {
.entry("Uninstall", None, {
let fs = fs.clone();
let context_server_id = context_server_id.clone();
- let context_server_store = context_server_store.clone();
let workspace = workspace.clone();
move |_, cx| {
- let is_provided_by_extension = context_server_store
- .read(cx)
- .configuration_for_server(&context_server_id)
- .as_ref()
- .map(|config| {
- matches!(
- config.as_ref(),
- ContextServerConfiguration::Extension { .. }
- )
- })
- .unwrap_or(false);
-
let uninstall_extension_task = match (
- is_provided_by_extension,
+ provided_by_extension,
resolve_extension_for_context_server(&context_server_id, cx),
) {
(true, Some((id, manifest))) => {
@@ -959,7 +955,7 @@ impl AgentConfiguration {
.cloned()
.collect::<Vec<_>>();
- let user_defined_agents = user_defined_agents
+ let user_defined_agents: Vec<_> = user_defined_agents
.into_iter()
.map(|name| {
let icon = if let Some(icon_path) = agent_server_store.agent_icon(&name) {
@@ -967,27 +963,93 @@ impl AgentConfiguration {
} else {
AgentIcon::Name(IconName::Ai)
};
- self.render_agent_server(icon, name, true)
- .into_any_element()
+ (name, icon)
})
- .collect::<Vec<_>>();
+ .collect();
- let add_agens_button = Button::new("add-agent", "Add Agent")
- .style(ButtonStyle::Outlined)
- .icon_position(IconPosition::Start)
- .icon(IconName::Plus)
- .icon_size(IconSize::Small)
- .icon_color(Color::Muted)
- .label_size(LabelSize::Small)
- .on_click(move |_, window, cx| {
- if let Some(workspace) = window.root().flatten() {
- let workspace = workspace.downgrade();
- window
- .spawn(cx, async |cx| {
- open_new_agent_servers_entry_in_settings_editor(workspace, cx).await
+ let add_agent_popover = PopoverMenu::new("add-agent-server-popover")
+ .trigger(
+ Button::new("add-agent", "Add Agent")
+ .style(ButtonStyle::Outlined)
+ .icon_position(IconPosition::Start)
+ .icon(IconName::Plus)
+ .icon_size(IconSize::Small)
+ .icon_color(Color::Muted)
+ .label_size(LabelSize::Small),
+ )
+ .menu({
+ move |window, cx| {
+ Some(ContextMenu::build(window, cx, |menu, _window, _cx| {
+ menu.entry("Install from Extensions", None, {
+ |window, cx| {
+ window.dispatch_action(
+ zed_actions::Extensions {
+ category_filter: Some(
+ ExtensionCategoryFilter::AgentServers,
+ ),
+ id: None,
+ }
+ .boxed_clone(),
+ cx,
+ )
+ }
})
- .detach_and_log_err(cx);
+ .entry("Add Custom Agent", None, {
+ move |window, cx| {
+ if let Some(workspace) = window.root().flatten() {
+ let workspace = workspace.downgrade();
+ window
+ .spawn(cx, async |cx| {
+ open_new_agent_servers_entry_in_settings_editor(
+ workspace, cx,
+ )
+ .await
+ })
+ .detach_and_log_err(cx);
+ }
+ }
+ })
+ .separator()
+ .header("Learn More")
+ .item(
+ ContextMenuEntry::new("Agent Servers Docs")
+ .icon(IconName::ArrowUpRight)
+ .icon_color(Color::Muted)
+ .icon_position(IconPosition::End)
+ .handler({
+ move |window, cx| {
+ window.dispatch_action(
+ Box::new(OpenBrowser {
+ url: zed_urls::agent_server_docs(cx),
+ }),
+ cx,
+ );
+ }
+ }),
+ )
+ .item(
+ ContextMenuEntry::new("ACP Docs")
+ .icon(IconName::ArrowUpRight)
+ .icon_color(Color::Muted)
+ .icon_position(IconPosition::End)
+ .handler({
+ move |window, cx| {
+ window.dispatch_action(
+ Box::new(OpenBrowser {
+ url: "https://agentclientprotocol.com/".into(),
+ }),
+ cx,
+ );
+ }
+ }),
+ )
+ }))
}
+ })
+ .anchor(gpui::Corner::TopRight)
+ .offset(gpui::Point {
+ x: px(0.0),
+ y: px(2.0),
});
v_flex()
@@ -998,7 +1060,7 @@ impl AgentConfiguration {
.child(self.render_section_title(
"External Agents",
"All agents connected through the Agent Client Protocol.",
- add_agens_button.into_any_element(),
+ add_agent_popover.into_any_element(),
))
.child(
v_flex()
@@ -1009,26 +1071,29 @@ impl AgentConfiguration {
AgentIcon::Name(IconName::AiClaude),
"Claude Code",
false,
+ cx,
))
.child(Divider::horizontal().color(DividerColor::BorderFaded))
.child(self.render_agent_server(
AgentIcon::Name(IconName::AiOpenAi),
"Codex CLI",
false,
+ cx,
))
.child(Divider::horizontal().color(DividerColor::BorderFaded))
.child(self.render_agent_server(
AgentIcon::Name(IconName::AiGemini),
"Gemini CLI",
false,
+ cx,
))
.map(|mut parent| {
- for agent in user_defined_agents {
+ for (name, icon) in user_defined_agents {
parent = parent
.child(
Divider::horizontal().color(DividerColor::BorderFaded),
)
- .child(agent);
+ .child(self.render_agent_server(icon, name, true, cx));
}
parent
}),
@@ -1041,6 +1106,7 @@ impl AgentConfiguration {
icon: AgentIcon,
name: impl Into<SharedString>,
external: bool,
+ cx: &mut Context<Self>,
) -> impl IntoElement {
let name = name.into();
let icon = match icon {
@@ -1055,28 +1121,53 @@ impl AgentConfiguration {
let tooltip_id = SharedString::new(format!("agent-source-{}", name));
let tooltip_message = format!("The {} agent was installed from an extension.", name);
+ let agent_server_name = ExternalAgentServerName(name.clone());
+
+ let uninstall_btn_id = SharedString::from(format!("uninstall-{}", name));
+ let uninstall_button = IconButton::new(uninstall_btn_id, IconName::Trash)
+ .icon_color(Color::Muted)
+ .icon_size(IconSize::Small)
+ .tooltip(Tooltip::text("Uninstall Agent Extension"))
+ .on_click(cx.listener(move |this, _, _window, cx| {
+ let agent_name = agent_server_name.clone();
+
+ if let Some(ext_id) = this.agent_server_store.update(cx, |store, _cx| {
+ store.get_extension_id_for_agent(&agent_name)
+ }) {
+ ExtensionStore::global(cx)
+ .update(cx, |store, cx| store.uninstall_extension(ext_id, cx))
+ .detach_and_log_err(cx);
+ }
+ }));
+
h_flex()
- .gap_1p5()
- .child(icon)
- .child(Label::new(name))
- .when(external, |this| {
- this.child(
- div()
- .id(tooltip_id)
- .flex_none()
- .tooltip(Tooltip::text(tooltip_message))
- .child(
- Icon::new(IconName::ZedSrcExtension)
- .size(IconSize::Small)
- .color(Color::Muted),
- ),
- )
- })
+ .gap_1()
+ .justify_between()
.child(
- Icon::new(IconName::Check)
- .color(Color::Success)
- .size(IconSize::Small),
+ h_flex()
+ .gap_1p5()
+ .child(icon)
+ .child(Label::new(name))
+ .when(external, |this| {
+ this.child(
+ div()
+ .id(tooltip_id)
+ .flex_none()
+ .tooltip(Tooltip::text(tooltip_message))
+ .child(
+ Icon::new(IconName::ZedSrcExtension)
+ .size(IconSize::Small)
+ .color(Color::Muted),
+ ),
+ )
+ })
+ .child(
+ Icon::new(IconName::Check)
+ .color(Color::Success)
+ .size(IconSize::Small),
+ ),
)
+ .when(external, |this| this.child(uninstall_button))
}
}
@@ -7,8 +7,10 @@ use agent_settings::{AgentProfile, AgentProfileId, AgentSettings, builtin_profil
use editor::Editor;
use fs::Fs;
use gpui::{DismissEvent, Entity, EventEmitter, FocusHandle, Focusable, Subscription, prelude::*};
-use language_model::LanguageModel;
-use settings::Settings as _;
+use language_model::{LanguageModel, LanguageModelRegistry};
+use settings::{
+ LanguageModelProviderSetting, LanguageModelSelection, Settings as _, update_settings_file,
+};
use ui::{
KeyBinding, ListItem, ListItemSpacing, ListSeparator, Navigable, NavigableEntry, prelude::*,
};
@@ -16,6 +18,7 @@ use workspace::{ModalView, Workspace};
use crate::agent_configuration::manage_profiles_modal::profile_modal_header::ProfileModalHeader;
use crate::agent_configuration::tool_picker::{ToolPicker, ToolPickerDelegate};
+use crate::language_model_selector::{LanguageModelSelector, language_model_selector};
use crate::{AgentPanel, ManageProfiles};
enum Mode {
@@ -32,6 +35,11 @@ enum Mode {
tool_picker: Entity<ToolPicker>,
_subscription: Subscription,
},
+ ConfigureDefaultModel {
+ profile_id: AgentProfileId,
+ model_picker: Entity<LanguageModelSelector>,
+ _subscription: Subscription,
+ },
}
impl Mode {
@@ -83,6 +91,7 @@ pub struct ChooseProfileMode {
pub struct ViewProfileMode {
profile_id: AgentProfileId,
fork_profile: NavigableEntry,
+ configure_default_model: NavigableEntry,
configure_tools: NavigableEntry,
configure_mcps: NavigableEntry,
cancel_item: NavigableEntry,
@@ -180,6 +189,7 @@ impl ManageProfilesModal {
self.mode = Mode::ViewProfile(ViewProfileMode {
profile_id,
fork_profile: NavigableEntry::focusable(cx),
+ configure_default_model: NavigableEntry::focusable(cx),
configure_tools: NavigableEntry::focusable(cx),
configure_mcps: NavigableEntry::focusable(cx),
cancel_item: NavigableEntry::focusable(cx),
@@ -187,6 +197,83 @@ impl ManageProfilesModal {
self.focus_handle(cx).focus(window);
}
+ fn configure_default_model(
+ &mut self,
+ profile_id: AgentProfileId,
+ window: &mut Window,
+ cx: &mut Context<Self>,
+ ) {
+ let fs = self.fs.clone();
+ let profile_id_for_closure = profile_id.clone();
+
+ let model_picker = cx.new(|cx| {
+ let fs = fs.clone();
+ let profile_id = profile_id_for_closure.clone();
+
+ language_model_selector(
+ {
+ let profile_id = profile_id.clone();
+ move |cx| {
+ let settings = AgentSettings::get_global(cx);
+
+ settings
+ .profiles
+ .get(&profile_id)
+ .and_then(|profile| profile.default_model.as_ref())
+ .and_then(|selection| {
+ let registry = LanguageModelRegistry::read_global(cx);
+ let provider_id = language_model::LanguageModelProviderId(
+ gpui::SharedString::from(selection.provider.0.clone()),
+ );
+ let provider = registry.provider(&provider_id)?;
+ let model = provider
+ .provided_models(cx)
+ .iter()
+ .find(|m| m.id().0 == selection.model.as_str())?
+ .clone();
+ Some(language_model::ConfiguredModel { provider, model })
+ })
+ }
+ },
+ move |model, cx| {
+ let provider = model.provider_id().0.to_string();
+ let model_id = model.id().0.to_string();
+ let profile_id = profile_id.clone();
+
+ update_settings_file(fs.clone(), cx, move |settings, _cx| {
+ let agent_settings = settings.agent.get_or_insert_default();
+ if let Some(profiles) = agent_settings.profiles.as_mut() {
+ if let Some(profile) = profiles.get_mut(profile_id.0.as_ref()) {
+ profile.default_model = Some(LanguageModelSelection {
+ provider: LanguageModelProviderSetting(provider.clone()),
+ model: model_id.clone(),
+ });
+ }
+ }
+ });
+ },
+ false, // Do not use popover styles for the model picker
+ window,
+ cx,
+ )
+ .modal(false)
+ });
+
+ let dismiss_subscription = cx.subscribe_in(&model_picker, window, {
+ let profile_id = profile_id.clone();
+ move |this, _picker, _: &DismissEvent, window, cx| {
+ this.view_profile(profile_id.clone(), window, cx);
+ }
+ });
+
+ self.mode = Mode::ConfigureDefaultModel {
+ profile_id,
+ model_picker,
+ _subscription: dismiss_subscription,
+ };
+ self.focus_handle(cx).focus(window);
+ }
+
fn configure_mcp_tools(
&mut self,
profile_id: AgentProfileId,
@@ -277,6 +364,7 @@ impl ManageProfilesModal {
Mode::ViewProfile(_) => {}
Mode::ConfigureTools { .. } => {}
Mode::ConfigureMcps { .. } => {}
+ Mode::ConfigureDefaultModel { .. } => {}
}
}
@@ -299,6 +387,9 @@ impl ManageProfilesModal {
Mode::ConfigureMcps { profile_id, .. } => {
self.view_profile(profile_id.clone(), window, cx)
}
+ Mode::ConfigureDefaultModel { profile_id, .. } => {
+ self.view_profile(profile_id.clone(), window, cx)
+ }
}
}
}
@@ -313,6 +404,7 @@ impl Focusable for ManageProfilesModal {
Mode::ViewProfile(_) => self.focus_handle.clone(),
Mode::ConfigureTools { tool_picker, .. } => tool_picker.focus_handle(cx),
Mode::ConfigureMcps { tool_picker, .. } => tool_picker.focus_handle(cx),
+ Mode::ConfigureDefaultModel { model_picker, .. } => model_picker.focus_handle(cx),
}
}
}
@@ -544,6 +636,47 @@ impl ManageProfilesModal {
}),
),
)
+ .child(
+ div()
+ .id("configure-default-model")
+ .track_focus(&mode.configure_default_model.focus_handle)
+ .on_action({
+ let profile_id = mode.profile_id.clone();
+ cx.listener(move |this, _: &menu::Confirm, window, cx| {
+ this.configure_default_model(
+ profile_id.clone(),
+ window,
+ cx,
+ );
+ })
+ })
+ .child(
+ ListItem::new("model-item")
+ .toggle_state(
+ mode.configure_default_model
+ .focus_handle
+ .contains_focused(window, cx),
+ )
+ .inset(true)
+ .spacing(ListItemSpacing::Sparse)
+ .start_slot(
+ Icon::new(IconName::ZedAssistant)
+ .size(IconSize::Small)
+ .color(Color::Muted),
+ )
+ .child(Label::new("Configure Default Model"))
+ .on_click({
+ let profile_id = mode.profile_id.clone();
+ cx.listener(move |this, _, window, cx| {
+ this.configure_default_model(
+ profile_id.clone(),
+ window,
+ cx,
+ );
+ })
+ }),
+ ),
+ )
.child(
div()
.id("configure-builtin-tools")
@@ -668,6 +801,7 @@ impl ManageProfilesModal {
.into_any_element(),
)
.entry(mode.fork_profile)
+ .entry(mode.configure_default_model)
.entry(mode.configure_tools)
.entry(mode.configure_mcps)
.entry(mode.cancel_item)
@@ -753,6 +887,29 @@ impl Render for ManageProfilesModal {
.child(go_back_item)
.into_any_element()
}
+ Mode::ConfigureDefaultModel {
+ profile_id,
+ model_picker,
+ ..
+ } => {
+ let profile_name = settings
+ .profiles
+ .get(profile_id)
+ .map(|profile| profile.name.clone())
+ .unwrap_or_else(|| "Unknown".into());
+
+ v_flex()
+ .pb_1()
+ .child(ProfileModalHeader::new(
+ format!("{profile_name} β Configure Default Model"),
+ Some(IconName::Ai),
+ ))
+ .child(ListSeparator)
+ .child(v_flex().w(rems(34.)).child(model_picker.clone()))
+ .child(ListSeparator)
+ .child(go_back_item)
+ .into_any_element()
+ }
Mode::ConfigureMcps {
profile_id,
tool_picker,
@@ -314,6 +314,7 @@ impl PickerDelegate for ToolPickerDelegate {
)
})
.collect(),
+ default_model: default_profile.default_model.clone(),
});
if let Some(server_id) = server_id {
@@ -47,6 +47,7 @@ impl AgentModelSelector {
}
}
},
+ true, // Use popover styles for picker
window,
cx,
)
@@ -19,14 +19,26 @@ pub type LanguageModelSelector = Picker<LanguageModelPickerDelegate>;
pub fn language_model_selector(
get_active_model: impl Fn(&App) -> Option<ConfiguredModel> + 'static,
on_model_changed: impl Fn(Arc<dyn LanguageModel>, &mut App) + 'static,
+ popover_styles: bool,
window: &mut Window,
cx: &mut Context<LanguageModelSelector>,
) -> LanguageModelSelector {
- let delegate = LanguageModelPickerDelegate::new(get_active_model, on_model_changed, window, cx);
- Picker::list(delegate, window, cx)
- .show_scrollbar(true)
- .width(rems(20.))
- .max_height(Some(rems(20.).into()))
+ let delegate = LanguageModelPickerDelegate::new(
+ get_active_model,
+ on_model_changed,
+ popover_styles,
+ window,
+ cx,
+ );
+
+ if popover_styles {
+ Picker::list(delegate, window, cx)
+ .show_scrollbar(true)
+ .width(rems(20.))
+ .max_height(Some(rems(20.).into()))
+ } else {
+ Picker::list(delegate, window, cx).show_scrollbar(true)
+ }
}
fn all_models(cx: &App) -> GroupedModels {
@@ -75,12 +87,14 @@ pub struct LanguageModelPickerDelegate {
selected_index: usize,
_authenticate_all_providers_task: Task<()>,
_subscriptions: Vec<Subscription>,
+ popover_styles: bool,
}
impl LanguageModelPickerDelegate {
fn new(
get_active_model: impl Fn(&App) -> Option<ConfiguredModel> + 'static,
on_model_changed: impl Fn(Arc<dyn LanguageModel>, &mut App) + 'static,
+ popover_styles: bool,
window: &mut Window,
cx: &mut Context<Picker<Self>>,
) -> Self {
@@ -113,6 +127,7 @@ impl LanguageModelPickerDelegate {
}
},
)],
+ popover_styles,
}
}
@@ -530,6 +545,10 @@ impl PickerDelegate for LanguageModelPickerDelegate {
_window: &mut Window,
cx: &mut Context<Picker<Self>>,
) -> Option<gpui::AnyElement> {
+ if !self.popover_styles {
+ return None;
+ }
+
Some(
h_flex()
.w_full()
@@ -15,8 +15,8 @@ use std::{
sync::{Arc, atomic::AtomicBool},
};
use ui::{
- DocumentationAside, DocumentationEdge, DocumentationSide, HighlightedLabel, LabelSize,
- ListItem, ListItemSpacing, PopoverMenuHandle, TintColor, Tooltip, prelude::*,
+ DocumentationAside, DocumentationEdge, DocumentationSide, HighlightedLabel, KeyBinding,
+ LabelSize, ListItem, ListItemSpacing, PopoverMenuHandle, TintColor, Tooltip, prelude::*,
};
/// Trait for types that can provide and manage agent profiles
@@ -81,6 +81,7 @@ impl ProfileSelector {
self.provider.clone(),
self.profiles.clone(),
cx.background_executor().clone(),
+ self.focus_handle.clone(),
cx,
);
@@ -207,6 +208,7 @@ pub(crate) struct ProfilePickerDelegate {
selected_index: usize,
query: String,
cancel: Option<Arc<AtomicBool>>,
+ focus_handle: FocusHandle,
}
impl ProfilePickerDelegate {
@@ -215,6 +217,7 @@ impl ProfilePickerDelegate {
provider: Arc<dyn ProfileProvider>,
profiles: AvailableProfiles,
background: BackgroundExecutor,
+ focus_handle: FocusHandle,
cx: &mut Context<ProfileSelector>,
) -> Self {
let candidates = Self::candidates_from(profiles);
@@ -231,6 +234,7 @@ impl ProfilePickerDelegate {
selected_index: 0,
query: String::new(),
cancel: None,
+ focus_handle,
};
this.selected_index = this
@@ -594,20 +598,26 @@ impl PickerDelegate for ProfilePickerDelegate {
_: &mut Window,
cx: &mut Context<Picker<Self>>,
) -> Option<gpui::AnyElement> {
+ let focus_handle = self.focus_handle.clone();
+
Some(
h_flex()
.w_full()
.border_t_1()
.border_color(cx.theme().colors().border_variant)
- .p_1()
- .gap_4()
- .justify_between()
+ .p_1p5()
.child(
Button::new("configure", "Configure")
- .icon(IconName::Settings)
- .icon_size(IconSize::Small)
- .icon_color(Color::Muted)
- .icon_position(IconPosition::Start)
+ .full_width()
+ .style(ButtonStyle::Outlined)
+ .key_binding(
+ KeyBinding::for_action_in(
+ &ManageProfiles::default(),
+ &focus_handle,
+ cx,
+ )
+ .map(|kb| kb.size(rems_from_px(12.))),
+ )
.on_click(|_, window, cx| {
window.dispatch_action(ManageProfiles::default().boxed_clone(), cx);
}),
@@ -659,20 +669,25 @@ mod tests {
is_builtin: true,
}];
- let delegate = ProfilePickerDelegate {
- fs: FakeFs::new(cx.executor()),
- provider: Arc::new(TestProfileProvider::new(AgentProfileId("write".into()))),
- background: cx.executor(),
- candidates,
- string_candidates: Arc::new(Vec::new()),
- filtered_entries: Vec::new(),
- selected_index: 0,
- query: String::new(),
- cancel: None,
- };
-
- let matches = Vec::new(); // No matches
- let _entries = delegate.entries_from_matches(matches);
+ cx.update(|cx| {
+ let focus_handle = cx.focus_handle();
+
+ let delegate = ProfilePickerDelegate {
+ fs: FakeFs::new(cx.background_executor().clone()),
+ provider: Arc::new(TestProfileProvider::new(AgentProfileId("write".into()))),
+ background: cx.background_executor().clone(),
+ candidates,
+ string_candidates: Arc::new(Vec::new()),
+ filtered_entries: Vec::new(),
+ selected_index: 0,
+ query: String::new(),
+ cancel: None,
+ focus_handle,
+ };
+
+ let matches = Vec::new(); // No matches
+ let _entries = delegate.entries_from_matches(matches);
+ });
}
#[gpui::test]
@@ -690,30 +705,35 @@ mod tests {
},
];
- let delegate = ProfilePickerDelegate {
- fs: FakeFs::new(cx.executor()),
- provider: Arc::new(TestProfileProvider::new(AgentProfileId("write".into()))),
- background: cx.executor(),
- candidates,
- string_candidates: Arc::new(Vec::new()),
- filtered_entries: vec![
- ProfilePickerEntry::Profile(ProfileMatchEntry {
- candidate_index: 0,
- positions: Vec::new(),
- }),
- ProfilePickerEntry::Profile(ProfileMatchEntry {
- candidate_index: 1,
- positions: Vec::new(),
- }),
- ],
- selected_index: 0,
- query: String::new(),
- cancel: None,
- };
-
- // Active profile should be found at index 0
- let active_index = delegate.index_of_profile(&AgentProfileId("write".into()));
- assert_eq!(active_index, Some(0));
+ cx.update(|cx| {
+ let focus_handle = cx.focus_handle();
+
+ let delegate = ProfilePickerDelegate {
+ fs: FakeFs::new(cx.background_executor().clone()),
+ provider: Arc::new(TestProfileProvider::new(AgentProfileId("write".into()))),
+ background: cx.background_executor().clone(),
+ candidates,
+ string_candidates: Arc::new(Vec::new()),
+ filtered_entries: vec![
+ ProfilePickerEntry::Profile(ProfileMatchEntry {
+ candidate_index: 0,
+ positions: Vec::new(),
+ }),
+ ProfilePickerEntry::Profile(ProfileMatchEntry {
+ candidate_index: 1,
+ positions: Vec::new(),
+ }),
+ ],
+ selected_index: 0,
+ query: String::new(),
+ cancel: None,
+ focus_handle,
+ };
+
+ // Active profile should be found at index 0
+ let active_index = delegate.index_of_profile(&AgentProfileId("write".into()));
+ assert_eq!(active_index, Some(0));
+ });
}
struct TestProfileProvider {
@@ -314,6 +314,7 @@ impl TextThreadEditor {
)
});
},
+ true, // Use popover styles for picker
window,
cx,
)
@@ -477,7 +478,7 @@ impl TextThreadEditor {
editor.insert(&format!("/{name}"), window, cx);
if command.accepts_arguments() {
editor.insert(" ", window, cx);
- editor.show_completions(&ShowCompletions::default(), window, cx);
+ editor.show_completions(&ShowCompletions, window, cx);
}
});
});
@@ -33,4 +33,9 @@ workspace.workspace = true
which.workspace = true
[dev-dependencies]
+ctor.workspace = true
+clock= { workspace = true, "features" = ["test-support"] }
+futures.workspace = true
gpui = { workspace = true, "features" = ["test-support"] }
+parking_lot.workspace = true
+zlog.workspace = true
@@ -1,12 +1,11 @@
use anyhow::{Context as _, Result};
-use client::{Client, TelemetrySettings};
-use db::RELEASE_CHANNEL;
+use client::Client;
use db::kvp::KEY_VALUE_STORE;
use gpui::{
App, AppContext as _, AsyncApp, BackgroundExecutor, Context, Entity, Global, SemanticVersion,
Task, Window, actions,
};
-use http_client::{AsyncBody, HttpClient, HttpClientWithUrl};
+use http_client::{HttpClient, HttpClientWithUrl};
use paths::remote_servers_dir;
use release_channel::{AppCommitSha, ReleaseChannel};
use serde::{Deserialize, Serialize};
@@ -41,22 +40,23 @@ actions!(
]
);
-#[derive(Serialize)]
-struct UpdateRequestBody {
- installation_id: Option<Arc<str>>,
- release_channel: Option<&'static str>,
- telemetry: bool,
- is_staff: Option<bool>,
- destination: &'static str,
-}
-
#[derive(Clone, Debug, PartialEq, Eq)]
pub enum VersionCheckType {
Sha(AppCommitSha),
Semantic(SemanticVersion),
}
-#[derive(Clone)]
+#[derive(Serialize, Debug)]
+pub struct AssetQuery<'a> {
+ asset: &'a str,
+ os: &'a str,
+ arch: &'a str,
+ metrics_id: Option<&'a str>,
+ system_id: Option<&'a str>,
+ is_staff: Option<bool>,
+}
+
+#[derive(Clone, Debug)]
pub enum AutoUpdateStatus {
Idle,
Checking,
@@ -66,6 +66,31 @@ pub enum AutoUpdateStatus {
Errored { error: Arc<anyhow::Error> },
}
+impl PartialEq for AutoUpdateStatus {
+ fn eq(&self, other: &Self) -> bool {
+ match (self, other) {
+ (AutoUpdateStatus::Idle, AutoUpdateStatus::Idle) => true,
+ (AutoUpdateStatus::Checking, AutoUpdateStatus::Checking) => true,
+ (
+ AutoUpdateStatus::Downloading { version: v1 },
+ AutoUpdateStatus::Downloading { version: v2 },
+ ) => v1 == v2,
+ (
+ AutoUpdateStatus::Installing { version: v1 },
+ AutoUpdateStatus::Installing { version: v2 },
+ ) => v1 == v2,
+ (
+ AutoUpdateStatus::Updated { version: v1 },
+ AutoUpdateStatus::Updated { version: v2 },
+ ) => v1 == v2,
+ (AutoUpdateStatus::Errored { error: e1 }, AutoUpdateStatus::Errored { error: e2 }) => {
+ e1.to_string() == e2.to_string()
+ }
+ _ => false,
+ }
+ }
+}
+
impl AutoUpdateStatus {
pub fn is_updated(&self) -> bool {
matches!(self, Self::Updated { .. })
@@ -75,13 +100,13 @@ impl AutoUpdateStatus {
pub struct AutoUpdater {
status: AutoUpdateStatus,
current_version: SemanticVersion,
- http_client: Arc<HttpClientWithUrl>,
+ client: Arc<Client>,
pending_poll: Option<Task<Option<()>>>,
quit_subscription: Option<gpui::Subscription>,
}
-#[derive(Deserialize, Clone, Debug)]
-pub struct JsonRelease {
+#[derive(Deserialize, Serialize, Clone, Debug)]
+pub struct ReleaseAsset {
pub version: String,
pub url: String,
}
@@ -137,7 +162,7 @@ struct GlobalAutoUpdate(Option<Entity<AutoUpdater>>);
impl Global for GlobalAutoUpdate {}
-pub fn init(http_client: Arc<HttpClientWithUrl>, cx: &mut App) {
+pub fn init(client: Arc<Client>, cx: &mut App) {
cx.observe_new(|workspace: &mut Workspace, _window, _cx| {
workspace.register_action(|_, action, window, cx| check(action, window, cx));
@@ -149,7 +174,7 @@ pub fn init(http_client: Arc<HttpClientWithUrl>, cx: &mut App) {
let version = release_channel::AppVersion::global(cx);
let auto_updater = cx.new(|cx| {
- let updater = AutoUpdater::new(version, http_client, cx);
+ let updater = AutoUpdater::new(version, client, cx);
let poll_for_updates = ReleaseChannel::try_global(cx)
.map(|channel| channel.poll_for_updates())
@@ -233,7 +258,7 @@ pub fn view_release_notes(_: &ViewReleaseNotes, cx: &mut App) -> Option<()> {
let current_version = auto_updater.current_version;
let release_channel = release_channel.dev_name();
let path = format!("/releases/{release_channel}/{current_version}");
- let url = &auto_updater.http_client.build_url(&path);
+ let url = &auto_updater.client.http_client().build_url(&path);
cx.open_url(url);
}
ReleaseChannel::Nightly => {
@@ -296,11 +321,7 @@ impl AutoUpdater {
cx.default_global::<GlobalAutoUpdate>().0.clone()
}
- fn new(
- current_version: SemanticVersion,
- http_client: Arc<HttpClientWithUrl>,
- cx: &mut Context<Self>,
- ) -> Self {
+ fn new(current_version: SemanticVersion, client: Arc<Client>, cx: &mut Context<Self>) -> Self {
// On windows, executable files cannot be overwritten while they are
// running, so we must wait to overwrite the application until quitting
// or restarting. When quitting the app, we spawn the auto update helper
@@ -321,7 +342,7 @@ impl AutoUpdater {
Self {
status: AutoUpdateStatus::Idle,
current_version,
- http_client,
+ client,
pending_poll: None,
quit_subscription,
}
@@ -329,8 +350,7 @@ impl AutoUpdater {
pub fn start_polling(&self, cx: &mut Context<Self>) -> Task<Result<()>> {
cx.spawn(async move |this, cx| {
- #[cfg(target_os = "windows")]
- {
+ if cfg!(target_os = "windows") {
use util::ResultExt;
cleanup_windows()
@@ -354,7 +374,7 @@ impl AutoUpdater {
cx.notify();
self.pending_poll = Some(cx.spawn(async move |this, cx| {
- let result = Self::update(this.upgrade()?, cx.clone()).await;
+ let result = Self::update(this.upgrade()?, cx).await;
this.update(cx, |this, cx| {
this.pending_poll = None;
if let Err(error) = result {
@@ -400,10 +420,10 @@ impl AutoUpdater {
// you can override this function. You should also update get_remote_server_release_url to return
// Ok(None).
pub async fn download_remote_server_release(
- os: &str,
- arch: &str,
release_channel: ReleaseChannel,
version: Option<SemanticVersion>,
+ os: &str,
+ arch: &str,
set_status: impl Fn(&str, &mut AsyncApp) + Send + 'static,
cx: &mut AsyncApp,
) -> Result<PathBuf> {
@@ -415,13 +435,13 @@ impl AutoUpdater {
})??;
set_status("Fetching remote server release", cx);
- let release = Self::get_release(
+ let release = Self::get_release_asset(
&this,
+ release_channel,
+ version,
"zed-remote-server",
os,
arch,
- version,
- Some(release_channel),
cx,
)
.await?;
@@ -432,7 +452,7 @@ impl AutoUpdater {
let version_path = platform_dir.join(format!("{}.gz", release.version));
smol::fs::create_dir_all(&platform_dir).await.ok();
- let client = this.read_with(cx, |this, _| this.http_client.clone())?;
+ let client = this.read_with(cx, |this, _| this.client.http_client())?;
if smol::fs::metadata(&version_path).await.is_err() {
log::info!(
@@ -440,19 +460,19 @@ impl AutoUpdater {
release.version
);
set_status("Downloading remote server", cx);
- download_remote_server_binary(&version_path, release, client, cx).await?;
+ download_remote_server_binary(&version_path, release, client).await?;
}
Ok(version_path)
}
pub async fn get_remote_server_release_url(
+ channel: ReleaseChannel,
+ version: Option<SemanticVersion>,
os: &str,
arch: &str,
- release_channel: ReleaseChannel,
- version: Option<SemanticVersion>,
cx: &mut AsyncApp,
- ) -> Result<Option<(String, String)>> {
+ ) -> Result<Option<String>> {
let this = cx.update(|cx| {
cx.default_global::<GlobalAutoUpdate>()
.0
@@ -460,108 +480,99 @@ impl AutoUpdater {
.context("auto-update not initialized")
})??;
- let release = Self::get_release(
- &this,
- "zed-remote-server",
- os,
- arch,
- version,
- Some(release_channel),
- cx,
- )
- .await?;
-
- let update_request_body = build_remote_server_update_request_body(cx)?;
- let body = serde_json::to_string(&update_request_body)?;
+ let release =
+ Self::get_release_asset(&this, channel, version, "zed-remote-server", os, arch, cx)
+ .await?;
- Ok(Some((release.url, body)))
+ Ok(Some(release.url))
}
- async fn get_release(
+ async fn get_release_asset(
this: &Entity<Self>,
+ release_channel: ReleaseChannel,
+ version: Option<SemanticVersion>,
asset: &str,
os: &str,
arch: &str,
- version: Option<SemanticVersion>,
- release_channel: Option<ReleaseChannel>,
cx: &mut AsyncApp,
- ) -> Result<JsonRelease> {
- let client = this.read_with(cx, |this, _| this.http_client.clone())?;
-
- if let Some(version) = version {
- let channel = release_channel.map(|c| c.dev_name()).unwrap_or("stable");
-
- let url = format!("/api/releases/{channel}/{version}/{asset}-{os}-{arch}.gz?update=1",);
-
- Ok(JsonRelease {
- version: version.to_string(),
- url: client.build_url(&url),
- })
+ ) -> Result<ReleaseAsset> {
+ let client = this.read_with(cx, |this, _| this.client.clone())?;
+
+ let (system_id, metrics_id, is_staff) = if client.telemetry().metrics_enabled() {
+ (
+ client.telemetry().system_id(),
+ client.telemetry().metrics_id(),
+ client.telemetry().is_staff(),
+ )
} else {
- let mut url_string = client.build_url(&format!(
- "/api/releases/latest?asset={}&os={}&arch={}",
- asset, os, arch
- ));
- if let Some(param) = release_channel.and_then(|c| c.release_query_param()) {
- url_string += "&";
- url_string += param;
- }
+ (None, None, None)
+ };
- let mut response = client.get(&url_string, Default::default(), true).await?;
- let mut body = Vec::new();
- response.body_mut().read_to_end(&mut body).await?;
+ let version = if let Some(version) = version {
+ version.to_string()
+ } else {
+ "latest".to_string()
+ };
+ let http_client = client.http_client();
+
+ let path = format!("/releases/{}/{}/asset", release_channel.dev_name(), version,);
+ let url = http_client.build_zed_cloud_url_with_query(
+ &path,
+ AssetQuery {
+ os,
+ arch,
+ asset,
+ metrics_id: metrics_id.as_deref(),
+ system_id: system_id.as_deref(),
+ is_staff: is_staff,
+ },
+ )?;
- anyhow::ensure!(
- response.status().is_success(),
- "failed to fetch release: {:?}",
- String::from_utf8_lossy(&body),
- );
+ let mut response = http_client
+ .get(url.as_str(), Default::default(), true)
+ .await?;
+ let mut body = Vec::new();
+ response.body_mut().read_to_end(&mut body).await?;
- serde_json::from_slice(body.as_slice()).with_context(|| {
- format!(
- "error deserializing release {:?}",
- String::from_utf8_lossy(&body),
- )
- })
- }
- }
+ anyhow::ensure!(
+ response.status().is_success(),
+ "failed to fetch release: {:?}",
+ String::from_utf8_lossy(&body),
+ );
- async fn get_latest_release(
- this: &Entity<Self>,
- asset: &str,
- os: &str,
- arch: &str,
- release_channel: Option<ReleaseChannel>,
- cx: &mut AsyncApp,
- ) -> Result<JsonRelease> {
- Self::get_release(this, asset, os, arch, None, release_channel, cx).await
+ serde_json::from_slice(body.as_slice()).with_context(|| {
+ format!(
+ "error deserializing release {:?}",
+ String::from_utf8_lossy(&body),
+ )
+ })
}
- async fn update(this: Entity<Self>, mut cx: AsyncApp) -> Result<()> {
+ async fn update(this: Entity<Self>, cx: &mut AsyncApp) -> Result<()> {
let (client, installed_version, previous_status, release_channel) =
- this.read_with(&cx, |this, cx| {
+ this.read_with(cx, |this, cx| {
(
- this.http_client.clone(),
+ this.client.http_client(),
this.current_version,
this.status.clone(),
- ReleaseChannel::try_global(cx),
+ ReleaseChannel::try_global(cx).unwrap_or(ReleaseChannel::Stable),
)
})?;
Self::check_dependencies()?;
- this.update(&mut cx, |this, cx| {
+ this.update(cx, |this, cx| {
this.status = AutoUpdateStatus::Checking;
log::info!("Auto Update: checking for updates");
cx.notify();
})?;
let fetched_release_data =
- Self::get_latest_release(&this, "zed", OS, ARCH, release_channel, &mut cx).await?;
+ Self::get_release_asset(&this, release_channel, None, "zed", OS, ARCH, cx).await?;
let fetched_version = fetched_release_data.clone().version;
let app_commit_sha = cx.update(|cx| AppCommitSha::try_global(cx).map(|sha| sha.full()));
let newer_version = Self::check_if_fetched_version_is_newer(
- *RELEASE_CHANNEL,
+ release_channel,
app_commit_sha,
installed_version,
fetched_version,
@@ -569,7 +580,7 @@ impl AutoUpdater {
)?;
let Some(newer_version) = newer_version else {
- return this.update(&mut cx, |this, cx| {
+ return this.update(cx, |this, cx| {
let status = match previous_status {
AutoUpdateStatus::Updated { .. } => previous_status,
_ => AutoUpdateStatus::Idle,
@@ -579,7 +590,7 @@ impl AutoUpdater {
});
};
- this.update(&mut cx, |this, cx| {
+ this.update(cx, |this, cx| {
this.status = AutoUpdateStatus::Downloading {
version: newer_version.clone(),
};
@@ -588,21 +599,21 @@ impl AutoUpdater {
let installer_dir = InstallerDir::new().await?;
let target_path = Self::target_path(&installer_dir).await?;
- download_release(&target_path, fetched_release_data, client, &cx).await?;
+ download_release(&target_path, fetched_release_data, client).await?;
- this.update(&mut cx, |this, cx| {
+ this.update(cx, |this, cx| {
this.status = AutoUpdateStatus::Installing {
version: newer_version.clone(),
};
cx.notify();
})?;
- let new_binary_path = Self::install_release(installer_dir, target_path, &cx).await?;
+ let new_binary_path = Self::install_release(installer_dir, target_path, cx).await?;
if let Some(new_binary_path) = new_binary_path {
cx.update(|cx| cx.set_restart_path(new_binary_path))?;
}
- this.update(&mut cx, |this, cx| {
+ this.update(cx, |this, cx| {
this.set_should_show_update_notification(true, cx)
.detach_and_log_err(cx);
this.status = AutoUpdateStatus::Updated {
@@ -681,6 +692,12 @@ impl AutoUpdater {
target_path: PathBuf,
cx: &AsyncApp,
) -> Result<Option<PathBuf>> {
+ #[cfg(test)]
+ if let Some(test_install) =
+ cx.try_read_global::<tests::InstallOverride, _>(|g, _| g.0.clone())
+ {
+ return test_install(target_path, cx);
+ }
match OS {
"macos" => install_release_macos(&installer_dir, target_path, cx).await,
"linux" => install_release_linux(&installer_dir, target_path, cx).await,
@@ -731,16 +748,13 @@ impl AutoUpdater {
async fn download_remote_server_binary(
target_path: &PathBuf,
- release: JsonRelease,
+ release: ReleaseAsset,
client: Arc<HttpClientWithUrl>,
- cx: &AsyncApp,
) -> Result<()> {
let temp = tempfile::Builder::new().tempfile_in(remote_servers_dir())?;
let mut temp_file = File::create(&temp).await?;
- let update_request_body = build_remote_server_update_request_body(cx)?;
- let request_body = AsyncBody::from(serde_json::to_string(&update_request_body)?);
- let mut response = client.get(&release.url, request_body, true).await?;
+ let mut response = client.get(&release.url, Default::default(), true).await?;
anyhow::ensure!(
response.status().is_success(),
"failed to download remote server release: {:?}",
@@ -752,65 +766,19 @@ async fn download_remote_server_binary(
Ok(())
}
-fn build_remote_server_update_request_body(cx: &AsyncApp) -> Result<UpdateRequestBody> {
- let (installation_id, release_channel, telemetry_enabled, is_staff) = cx.update(|cx| {
- let telemetry = Client::global(cx).telemetry().clone();
- let is_staff = telemetry.is_staff();
- let installation_id = telemetry.installation_id();
- let release_channel =
- ReleaseChannel::try_global(cx).map(|release_channel| release_channel.display_name());
- let telemetry_enabled = TelemetrySettings::get_global(cx).metrics;
-
- (
- installation_id,
- release_channel,
- telemetry_enabled,
- is_staff,
- )
- })?;
-
- Ok(UpdateRequestBody {
- installation_id,
- release_channel,
- telemetry: telemetry_enabled,
- is_staff,
- destination: "remote",
- })
-}
-
async fn download_release(
target_path: &Path,
- release: JsonRelease,
+ release: ReleaseAsset,
client: Arc<HttpClientWithUrl>,
- cx: &AsyncApp,
) -> Result<()> {
let mut target_file = File::create(&target_path).await?;
- let (installation_id, release_channel, telemetry_enabled, is_staff) = cx.update(|cx| {
- let telemetry = Client::global(cx).telemetry().clone();
- let is_staff = telemetry.is_staff();
- let installation_id = telemetry.installation_id();
- let release_channel =
- ReleaseChannel::try_global(cx).map(|release_channel| release_channel.display_name());
- let telemetry_enabled = TelemetrySettings::get_global(cx).metrics;
-
- (
- installation_id,
- release_channel,
- telemetry_enabled,
- is_staff,
- )
- })?;
-
- let request_body = AsyncBody::from(serde_json::to_string(&UpdateRequestBody {
- installation_id,
- release_channel,
- telemetry: telemetry_enabled,
- is_staff,
- destination: "local",
- })?);
-
- let mut response = client.get(&release.url, request_body, true).await?;
+ let mut response = client.get(&release.url, Default::default(), true).await?;
+ anyhow::ensure!(
+ response.status().is_success(),
+ "failed to download update: {:?}",
+ response.status()
+ );
smol::io::copy(response.body_mut(), &mut target_file).await?;
log::info!("downloaded update. path:{:?}", target_path);
@@ -934,28 +902,16 @@ async fn install_release_macos(
Ok(None)
}
-#[cfg(target_os = "windows")]
async fn cleanup_windows() -> Result<()> {
- use util::ResultExt;
-
let parent = std::env::current_exe()?
.parent()
.context("No parent dir for Zed.exe")?
.to_owned();
// keep in sync with crates/auto_update_helper/src/updater.rs
- smol::fs::remove_dir(parent.join("updates"))
- .await
- .context("failed to remove updates dir")
- .log_err();
- smol::fs::remove_dir(parent.join("install"))
- .await
- .context("failed to remove install dir")
- .log_err();
- smol::fs::remove_dir(parent.join("old"))
- .await
- .context("failed to remove old version dir")
- .log_err();
+ _ = smol::fs::remove_dir(parent.join("updates")).await;
+ _ = smol::fs::remove_dir(parent.join("install")).await;
+ _ = smol::fs::remove_dir(parent.join("old")).await;
Ok(())
}
@@ -1010,11 +966,33 @@ pub async fn finalize_auto_update_on_quit() {
#[cfg(test)]
mod tests {
+ use client::Client;
+ use clock::FakeSystemClock;
+ use futures::channel::oneshot;
use gpui::TestAppContext;
+ use http_client::{FakeHttpClient, Response};
use settings::default_settings;
+ use std::{
+ rc::Rc,
+ sync::{
+ Arc,
+ atomic::{self, AtomicBool},
+ },
+ };
+ use tempfile::tempdir;
+
+ #[ctor::ctor]
+ fn init_logger() {
+ zlog::init_test();
+ }
use super::*;
+ pub(super) struct InstallOverride(
+ pub Rc<dyn Fn(PathBuf, &AsyncApp) -> Result<Option<PathBuf>>>,
+ );
+ impl Global for InstallOverride {}
+
#[gpui::test]
fn test_auto_update_defaults_to_true(cx: &mut TestAppContext) {
cx.update(|cx| {
@@ -1030,6 +1008,115 @@ mod tests {
});
}
+ #[gpui::test]
+ async fn test_auto_update_downloads(cx: &mut TestAppContext) {
+ cx.background_executor.allow_parking();
+ zlog::init_test();
+ let release_available = Arc::new(AtomicBool::new(false));
+
+ let (dmg_tx, dmg_rx) = oneshot::channel::<String>();
+
+ cx.update(|cx| {
+ settings::init(cx);
+
+ let current_version = SemanticVersion::new(0, 100, 0);
+ release_channel::init_test(current_version, ReleaseChannel::Stable, cx);
+
+ let clock = Arc::new(FakeSystemClock::new());
+ let release_available = Arc::clone(&release_available);
+ let dmg_rx = Arc::new(parking_lot::Mutex::new(Some(dmg_rx)));
+ let fake_client_http = FakeHttpClient::create(move |req| {
+ let release_available = release_available.load(atomic::Ordering::Relaxed);
+ let dmg_rx = dmg_rx.clone();
+ async move {
+ if req.uri().path() == "/releases/stable/latest/asset" {
+ if release_available {
+ return Ok(Response::builder().status(200).body(
+ r#"{"version":"0.100.1","url":"https://test.example/new-download"}"#.into()
+ ).unwrap());
+ } else {
+ return Ok(Response::builder().status(200).body(
+ r#"{"version":"0.100.0","url":"https://test.example/old-download"}"#.into()
+ ).unwrap());
+ }
+ } else if req.uri().path() == "/new-download" {
+ return Ok(Response::builder().status(200).body({
+ let dmg_rx = dmg_rx.lock().take().unwrap();
+ dmg_rx.await.unwrap().into()
+ }).unwrap());
+ }
+ Ok(Response::builder().status(404).body("".into()).unwrap())
+ }
+ });
+ let client = Client::new(clock, fake_client_http, cx);
+ crate::init(client, cx);
+ });
+
+ let auto_updater = cx.update(|cx| AutoUpdater::get(cx).expect("auto updater should exist"));
+
+ cx.background_executor.run_until_parked();
+
+ auto_updater.read_with(cx, |updater, _| {
+ assert_eq!(updater.status(), AutoUpdateStatus::Idle);
+ assert_eq!(updater.current_version(), SemanticVersion::new(0, 100, 0));
+ });
+
+ release_available.store(true, atomic::Ordering::SeqCst);
+ cx.background_executor.advance_clock(POLL_INTERVAL);
+ cx.background_executor.run_until_parked();
+
+ loop {
+ cx.background_executor.timer(Duration::from_millis(0)).await;
+ cx.run_until_parked();
+ let status = auto_updater.read_with(cx, |updater, _| updater.status());
+ if !matches!(status, AutoUpdateStatus::Idle) {
+ break;
+ }
+ }
+ let status = auto_updater.read_with(cx, |updater, _| updater.status());
+ assert_eq!(
+ status,
+ AutoUpdateStatus::Downloading {
+ version: VersionCheckType::Semantic(SemanticVersion::new(0, 100, 1))
+ }
+ );
+
+ dmg_tx.send("<fake-zed-update>".to_owned()).unwrap();
+
+ let tmp_dir = Arc::new(tempdir().unwrap());
+
+ cx.update(|cx| {
+ let tmp_dir = tmp_dir.clone();
+ cx.set_global(InstallOverride(Rc::new(move |target_path, _cx| {
+ let tmp_dir = tmp_dir.clone();
+ let dest_path = tmp_dir.path().join("zed");
+ std::fs::copy(&target_path, &dest_path)?;
+ Ok(Some(dest_path))
+ })));
+ });
+
+ loop {
+ cx.background_executor.timer(Duration::from_millis(0)).await;
+ cx.run_until_parked();
+ let status = auto_updater.read_with(cx, |updater, _| updater.status());
+ if !matches!(status, AutoUpdateStatus::Downloading { .. }) {
+ break;
+ }
+ }
+ let status = auto_updater.read_with(cx, |updater, _| updater.status());
+ assert_eq!(
+ status,
+ AutoUpdateStatus::Updated {
+ version: VersionCheckType::Semantic(SemanticVersion::new(0, 100, 1))
+ }
+ );
+ let will_restart = cx.expect_restart();
+ cx.update(|cx| cx.restart());
+ let path = will_restart.await.unwrap().unwrap();
+ assert_eq!(path, tmp_dir.path().join("zed"));
+ assert_eq!(std::fs::read_to_string(path).unwrap(), "<fake-zed-update>");
+ }
+
#[test]
fn test_stable_does_not_update_when_fetched_version_is_not_higher() {
let release_channel = ReleaseChannel::Stable;
@@ -1,6 +1,6 @@
use std::{
- cell::LazyCell,
path::Path,
+ sync::LazyLock,
time::{Duration, Instant},
};
@@ -13,8 +13,8 @@ use windows::Win32::{
use crate::windows_impl::WM_JOB_UPDATED;
pub(crate) struct Job {
- pub apply: Box<dyn Fn(&Path) -> Result<()>>,
- pub rollback: Box<dyn Fn(&Path) -> Result<()>>,
+ pub apply: Box<dyn Fn(&Path) -> Result<()> + Send + Sync>,
+ pub rollback: Box<dyn Fn(&Path) -> Result<()> + Send + Sync>,
}
impl Job {
@@ -154,10 +154,8 @@ impl Job {
}
}
-// app is single threaded
#[cfg(not(test))]
-#[allow(clippy::declare_interior_mutable_const)]
-pub(crate) const JOBS: LazyCell<[Job; 22]> = LazyCell::new(|| {
+pub(crate) static JOBS: LazyLock<[Job; 22]> = LazyLock::new(|| {
fn p(value: &str) -> &Path {
Path::new(value)
}
@@ -206,10 +204,8 @@ pub(crate) const JOBS: LazyCell<[Job; 22]> = LazyCell::new(|| {
]
});
-// app is single threaded
#[cfg(test)]
-#[allow(clippy::declare_interior_mutable_const)]
-pub(crate) const JOBS: LazyCell<[Job; 9]> = LazyCell::new(|| {
+pub(crate) static JOBS: LazyLock<[Job; 9]> = LazyLock::new(|| {
fn p(value: &str) -> &Path {
Path::new(value)
}
@@ -1487,7 +1487,7 @@ impl Client {
let url = self
.http
- .build_zed_cloud_url("/internal/users/impersonate", &[])?;
+ .build_zed_cloud_url("/internal/users/impersonate")?;
let request = Request::post(url.as_str())
.header("Content-Type", "application/json")
.header("Authorization", format!("Bearer {api_token}"))
@@ -435,7 +435,7 @@ impl Telemetry {
Some(project_types)
}
- fn report_event(self: &Arc<Self>, event: Event) {
+ fn report_event(self: &Arc<Self>, mut event: Event) {
let mut state = self.state.lock();
// RUST_LOG=telemetry=trace to debug telemetry events
log::trace!(target: "telemetry", "{:?}", event);
@@ -444,6 +444,12 @@ impl Telemetry {
return;
}
+ match &mut event {
+ Event::Flexible(event) => event
+ .event_properties
+ .insert("event_source".into(), "zed".into()),
+ };
+
if state.flush_events_task.is_none() {
let this = self.clone();
state.flush_events_task = Some(self.executor.spawn(async move {
@@ -51,3 +51,11 @@ pub fn external_agents_docs(cx: &App) -> String {
server_url = server_url(cx)
)
}
+
+/// Returns the URL to Zed agent servers documentation.
+pub fn agent_server_docs(cx: &App) -> String {
+ format!(
+ "{server_url}/docs/extensions/agent-servers",
+ server_url = server_url(cx)
+ )
+}
@@ -62,7 +62,7 @@ impl CloudApiClient {
let request = self.build_request(
Request::builder().method(Method::GET).uri(
self.http_client
- .build_zed_cloud_url("/client/users/me", &[])?
+ .build_zed_cloud_url("/client/users/me")?
.as_ref(),
),
AsyncBody::default(),
@@ -89,7 +89,7 @@ impl CloudApiClient {
pub fn connect(&self, cx: &App) -> Result<Task<Result<Connection>>> {
let mut connect_url = self
.http_client
- .build_zed_cloud_url("/client/users/connect", &[])?;
+ .build_zed_cloud_url("/client/users/connect")?;
connect_url
.set_scheme(match connect_url.scheme() {
"https" => "wss",
@@ -123,7 +123,7 @@ impl CloudApiClient {
.method(Method::POST)
.uri(
self.http_client
- .build_zed_cloud_url("/client/llm_tokens", &[])?
+ .build_zed_cloud_url("/client/llm_tokens")?
.as_ref(),
)
.when_some(system_id, |builder, system_id| {
@@ -154,7 +154,7 @@ impl CloudApiClient {
let request = build_request(
Request::builder().method(Method::GET).uri(
self.http_client
- .build_zed_cloud_url("/client/users/me", &[])?
+ .build_zed_cloud_url("/client/users/me")?
.as_ref(),
),
AsyncBody::default(),
@@ -183,13 +183,13 @@ pub struct PredictEditsGitInfo {
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct PredictEditsResponse {
- pub request_id: Uuid,
+ pub request_id: String,
pub output_excerpt: String,
}
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct AcceptEditPredictionBody {
- pub request_id: Uuid,
+ pub request_id: String,
}
#[derive(Debug, PartialEq, Eq, PartialOrd, Ord, Hash, Clone, Copy, Serialize, Deserialize)]
@@ -73,6 +73,7 @@ pub enum PromptFormat {
MarkedExcerpt,
LabeledSections,
NumLinesUniDiff,
+ OldTextNewText,
/// Prompt format intended for use via zeta_cli
OnlySnippets,
}
@@ -100,6 +101,7 @@ impl std::fmt::Display for PromptFormat {
PromptFormat::LabeledSections => write!(f, "Labeled Sections"),
PromptFormat::OnlySnippets => write!(f, "Only Snippets"),
PromptFormat::NumLinesUniDiff => write!(f, "Numbered Lines / Unified Diff"),
+ PromptFormat::OldTextNewText => write!(f, "Old Text / New Text"),
}
}
}
@@ -56,50 +56,98 @@ const LABELED_SECTIONS_INSTRUCTIONS: &str = indoc! {r#"
const NUMBERED_LINES_INSTRUCTIONS: &str = indoc! {r#"
# Instructions
- You are a code completion assistant helping a programmer finish their work. Your task is to:
+ You are an edit prediction agent in a code editor.
+ Your job is to predict the next edit that the user will make,
+ based on their last few edits and their current cursor location.
- 1. Analyze the edit history to understand what the programmer is trying to achieve
- 2. Identify any incomplete refactoring or changes that need to be finished
- 3. Make the remaining edits that a human programmer would logically make next
- 4. Apply systematic changes consistently across the entire codebase - if you see a pattern starting, complete it everywhere.
+ ## Output Format
- Focus on:
- - Understanding the intent behind the changes (e.g., improving error handling, refactoring APIs, fixing bugs)
- - Completing any partially-applied changes across the codebase
- - Ensuring consistency with the programming style and patterns already established
- - Making edits that maintain or improve code quality
- - If the programmer started refactoring one instance of a pattern, find and update ALL similar instances
- - Don't write a lot of code if you're not sure what to do
-
- Rules:
- - Do not just mechanically apply patterns - reason about what changes make sense given the context and the programmer's apparent goals.
- - Do not just fix syntax errors - look for the broader refactoring pattern and apply it systematically throughout the code.
- - Write the edits in the unified diff format as shown in the example.
-
- # Example output:
+ You must briefly explain your understanding of the user's goal, in one
+ or two sentences, and then specify their next edit in the form of a
+ unified diff, like this:
```
--- a/src/myapp/cli.py
+++ b/src/myapp/cli.py
- @@ -1,3 +1,3 @@
- -
- -
- -import sys
- +import json
+ @@ ... @@
+ import os
+ import time
+ import sys
+ +from constants import LOG_LEVEL_WARNING
+ @@ ... @@
+ config.headless()
+ config.set_interactive(false)
+ -config.set_log_level(LOG_L)
+ +config.set_log_level(LOG_LEVEL_WARNING)
+ config.set_use_color(True)
```
- # Edit History:
+ ## Edit History
"#};
const UNIFIED_DIFF_REMINDER: &str = indoc! {"
---
- Please analyze the edit history and the files, then provide the unified diff for your predicted edits.
+ Analyze the edit history and the files, then provide the unified diff for your predicted edits.
Do not include the cursor marker in your output.
- If you're editing multiple files, be sure to reflect filename in the hunk's header.
+ Your diff should include edited file paths in its file headers (lines beginning with `---` and `+++`).
+ Do not include line numbers in the hunk headers, use `@@ ... @@`.
+ Removed lines begin with `-`.
+ Added lines begin with `+`.
+ Context lines begin with an extra space.
+ Context and removed lines are used to match the target edit location, so make sure to include enough of them
+ to uniquely identify it amongst all excerpts of code provided.
"};
+const XML_TAGS_INSTRUCTIONS: &str = indoc! {r#"
+ # Instructions
+
+ You are an edit prediction agent in a code editor.
+ Your job is to predict the next edit that the user will make,
+ based on their last few edits and their current cursor location.
+
+ # Output Format
+
+ You must briefly explain your understanding of the user's goal, in one
+ or two sentences, and then specify their next edit, using the following
+ XML format:
+
+ <edits path="my-project/src/myapp/cli.py">
+ <old_text>
+ OLD TEXT 1 HERE
+ </old_text>
+ <new_text>
+ NEW TEXT 1 HERE
+ </new_text>
+
+ <old_text>
+ OLD TEXT 2 HERE
+ </old_text>
+ <new_text>
+ NEW TEXT 2 HERE
+ </new_text>
+ </edits>
+
+ - Specify the file to edit using the `path` attribute.
+ - Use `<old_text>` and `<new_text>` tags to replace content
+ - `<old_text>` must exactly match existing file content, including indentation
+ - `<old_text>` cannot be empty
+ - Do not escape quotes, newlines, or other characters within tags
+ - Always close all tags properly
+ - Don't include the <|user_cursor|> marker in your output.
+
+ # Edit History:
+
+"#};
+
+const OLD_TEXT_NEW_TEXT_REMINDER: &str = indoc! {r#"
+ ---
+
+ Remember that the edits in the edit history have already been deployed.
+ The files are currently as shown in the Code Excerpts section.
+"#};
+
pub fn build_prompt(
request: &predict_edits_v3::PredictEditsRequest,
) -> Result<(String, SectionLabels)> {
@@ -121,8 +169,9 @@ pub fn build_prompt(
EDITABLE_REGION_END_MARKER_WITH_NEWLINE,
),
],
- PromptFormat::LabeledSections => vec![(request.cursor_point, CURSOR_MARKER)],
- PromptFormat::NumLinesUniDiff => {
+ PromptFormat::LabeledSections
+ | PromptFormat::NumLinesUniDiff
+ | PromptFormat::OldTextNewText => {
vec![(request.cursor_point, CURSOR_MARKER)]
}
PromptFormat::OnlySnippets => vec![],
@@ -132,46 +181,32 @@ pub fn build_prompt(
PromptFormat::MarkedExcerpt => MARKED_EXCERPT_INSTRUCTIONS.to_string(),
PromptFormat::LabeledSections => LABELED_SECTIONS_INSTRUCTIONS.to_string(),
PromptFormat::NumLinesUniDiff => NUMBERED_LINES_INSTRUCTIONS.to_string(),
- // only intended for use via zeta_cli
+ PromptFormat::OldTextNewText => XML_TAGS_INSTRUCTIONS.to_string(),
PromptFormat::OnlySnippets => String::new(),
};
if request.events.is_empty() {
prompt.push_str("(No edit history)\n\n");
} else {
- prompt.push_str(
- "The following are the latest edits made by the user, from earlier to later.\n\n",
- );
+ prompt.push_str("Here are the latest edits made by the user, from earlier to later.\n\n");
push_events(&mut prompt, &request.events);
}
+ prompt.push_str(indoc! {"
+ # Code Excerpts
+
+ The cursor marker <|user_cursor|> indicates the current user cursor position.
+ The files are in their current state; edits from the edit history have already been applied.
+ "});
+
if request.prompt_format == PromptFormat::NumLinesUniDiff {
- if request.referenced_declarations.is_empty() {
- prompt.push_str(indoc! {"
- # File under the cursor:
-
- The cursor marker <|user_cursor|> indicates the current user cursor position.
- The file is in current state, edits from edit history have been applied.
- We prepend line numbers (e.g., `123|<actual line>`); they are not part of the file.
-
- "});
- } else {
- // Note: This hasn't been trained on yet
- prompt.push_str(indoc! {"
- # Code Excerpts:
-
- The cursor marker <|user_cursor|> indicates the current user cursor position.
- Other excerpts of code from the project have been included as context based on their similarity to the code under the cursor.
- Context excerpts are not guaranteed to be relevant, so use your own judgement.
- Files are in their current state, edits from edit history have been applied.
- We prepend line numbers (e.g., `123|<actual line>`); they are not part of the file.
-
- "});
- }
- } else {
- prompt.push_str("\n## Code\n\n");
+ prompt.push_str(indoc! {"
+ We prepend line numbers (e.g., `123|<actual line>`); they are not part of the file.
+ "});
}
+ prompt.push('\n');
+
let mut section_labels = Default::default();
if !request.referenced_declarations.is_empty() || !request.signatures.is_empty() {
@@ -198,8 +233,14 @@ pub fn build_prompt(
}
}
- if request.prompt_format == PromptFormat::NumLinesUniDiff {
- prompt.push_str(UNIFIED_DIFF_REMINDER);
+ match request.prompt_format {
+ PromptFormat::NumLinesUniDiff => {
+ prompt.push_str(UNIFIED_DIFF_REMINDER);
+ }
+ PromptFormat::OldTextNewText => {
+ prompt.push_str(OLD_TEXT_NEW_TEXT_REMINDER);
+ }
+ _ => {}
}
Ok((prompt, section_labels))
@@ -624,6 +665,7 @@ impl<'a> SyntaxBasedPrompt<'a> {
match self.request.prompt_format {
PromptFormat::MarkedExcerpt
| PromptFormat::OnlySnippets
+ | PromptFormat::OldTextNewText
| PromptFormat::NumLinesUniDiff => {
if range.start.0 > 0 && !skipped_last_snippet {
output.push_str("β¦\n");
@@ -291,29 +291,6 @@ CREATE TABLE IF NOT EXISTS "channel_chat_participants" (
CREATE INDEX "index_channel_chat_participants_on_channel_id" ON "channel_chat_participants" ("channel_id");
-CREATE TABLE IF NOT EXISTS "channel_messages" (
- "id" INTEGER PRIMARY KEY AUTOINCREMENT,
- "channel_id" INTEGER NOT NULL REFERENCES channels (id) ON DELETE CASCADE,
- "sender_id" INTEGER NOT NULL REFERENCES users (id),
- "body" TEXT NOT NULL,
- "sent_at" TIMESTAMP,
- "edited_at" TIMESTAMP,
- "nonce" BLOB NOT NULL,
- "reply_to_message_id" INTEGER DEFAULT NULL
-);
-
-CREATE INDEX "index_channel_messages_on_channel_id" ON "channel_messages" ("channel_id");
-
-CREATE UNIQUE INDEX "index_channel_messages_on_sender_id_nonce" ON "channel_messages" ("sender_id", "nonce");
-
-CREATE TABLE "channel_message_mentions" (
- "message_id" INTEGER NOT NULL REFERENCES channel_messages (id) ON DELETE CASCADE,
- "start_offset" INTEGER NOT NULL,
- "end_offset" INTEGER NOT NULL,
- "user_id" INTEGER NOT NULL REFERENCES users (id) ON DELETE CASCADE,
- PRIMARY KEY (message_id, start_offset)
-);
-
CREATE TABLE "channel_members" (
"id" INTEGER PRIMARY KEY AUTOINCREMENT,
"channel_id" INTEGER NOT NULL REFERENCES channels (id) ON DELETE CASCADE,
@@ -408,15 +385,6 @@ CREATE TABLE "observed_buffer_edits" (
CREATE UNIQUE INDEX "index_observed_buffers_user_and_buffer_id" ON "observed_buffer_edits" ("user_id", "buffer_id");
-CREATE TABLE IF NOT EXISTS "observed_channel_messages" (
- "user_id" INTEGER NOT NULL REFERENCES users (id) ON DELETE CASCADE,
- "channel_id" INTEGER NOT NULL REFERENCES channels (id) ON DELETE CASCADE,
- "channel_message_id" INTEGER NOT NULL,
- PRIMARY KEY (user_id, channel_id)
-);
-
-CREATE UNIQUE INDEX "index_observed_channel_messages_user_and_channel_id" ON "observed_channel_messages" ("user_id", "channel_id");
-
CREATE TABLE "notification_kinds" (
"id" INTEGER PRIMARY KEY AUTOINCREMENT,
"name" VARCHAR NOT NULL
@@ -0,0 +1,3 @@
+drop table observed_channel_messages;
+drop table channel_message_mentions;
+drop table channel_messages;
@@ -0,0 +1 @@
+drop table embeddings;
@@ -5,7 +5,6 @@ pub mod buffers;
pub mod channels;
pub mod contacts;
pub mod contributors;
-pub mod embeddings;
pub mod extensions;
pub mod notifications;
pub mod projects;
@@ -1,94 +0,0 @@
-use super::*;
-use time::Duration;
-use time::OffsetDateTime;
-
-impl Database {
- pub async fn get_embeddings(
- &self,
- model: &str,
- digests: &[Vec<u8>],
- ) -> Result<HashMap<Vec<u8>, Vec<f32>>> {
- self.transaction(|tx| async move {
- let embeddings = {
- let mut db_embeddings = embedding::Entity::find()
- .filter(
- embedding::Column::Model.eq(model).and(
- embedding::Column::Digest
- .is_in(digests.iter().map(|digest| digest.as_slice())),
- ),
- )
- .stream(&*tx)
- .await?;
-
- let mut embeddings = HashMap::default();
- while let Some(db_embedding) = db_embeddings.next().await {
- let db_embedding = db_embedding?;
- embeddings.insert(db_embedding.digest, db_embedding.dimensions);
- }
- embeddings
- };
-
- if !embeddings.is_empty() {
- let now = OffsetDateTime::now_utc();
- let retrieved_at = PrimitiveDateTime::new(now.date(), now.time());
-
- embedding::Entity::update_many()
- .filter(
- embedding::Column::Digest
- .is_in(embeddings.keys().map(|digest| digest.as_slice())),
- )
- .col_expr(embedding::Column::RetrievedAt, Expr::value(retrieved_at))
- .exec(&*tx)
- .await?;
- }
-
- Ok(embeddings)
- })
- .await
- }
-
- pub async fn save_embeddings(
- &self,
- model: &str,
- embeddings: &HashMap<Vec<u8>, Vec<f32>>,
- ) -> Result<()> {
- self.transaction(|tx| async move {
- embedding::Entity::insert_many(embeddings.iter().map(|(digest, dimensions)| {
- let now_offset_datetime = OffsetDateTime::now_utc();
- let retrieved_at =
- PrimitiveDateTime::new(now_offset_datetime.date(), now_offset_datetime.time());
-
- embedding::ActiveModel {
- model: ActiveValue::set(model.to_string()),
- digest: ActiveValue::set(digest.clone()),
- dimensions: ActiveValue::set(dimensions.clone()),
- retrieved_at: ActiveValue::set(retrieved_at),
- }
- }))
- .on_conflict(
- OnConflict::columns([embedding::Column::Model, embedding::Column::Digest])
- .do_nothing()
- .to_owned(),
- )
- .exec_without_returning(&*tx)
- .await?;
- Ok(())
- })
- .await
- }
-
- pub async fn purge_old_embeddings(&self) -> Result<()> {
- self.transaction(|tx| async move {
- embedding::Entity::delete_many()
- .filter(
- embedding::Column::RetrievedAt
- .lte(OffsetDateTime::now_utc() - Duration::days(60)),
- )
- .exec(&*tx)
- .await?;
-
- Ok(())
- })
- .await
- }
-}
@@ -66,40 +66,6 @@ impl Database {
.await
}
- /// Returns all users flagged as staff.
- pub async fn get_staff_users(&self) -> Result<Vec<user::Model>> {
- self.transaction(|tx| async {
- let tx = tx;
- Ok(user::Entity::find()
- .filter(user::Column::Admin.eq(true))
- .all(&*tx)
- .await?)
- })
- .await
- }
-
- /// Returns a user by email address. There are no access checks here, so this should only be used internally.
- pub async fn get_user_by_email(&self, email: &str) -> Result<Option<User>> {
- self.transaction(|tx| async move {
- Ok(user::Entity::find()
- .filter(user::Column::EmailAddress.eq(email))
- .one(&*tx)
- .await?)
- })
- .await
- }
-
- /// Returns a user by GitHub user ID. There are no access checks here, so this should only be used internally.
- pub async fn get_user_by_github_user_id(&self, github_user_id: i32) -> Result<Option<User>> {
- self.transaction(|tx| async move {
- Ok(user::Entity::find()
- .filter(user::Column::GithubUserId.eq(github_user_id))
- .one(&*tx)
- .await?)
- })
- .await
- }
-
/// Returns a user by GitHub login. There are no access checks here, so this should only be used internally.
pub async fn get_user_by_github_login(&self, github_login: &str) -> Result<Option<User>> {
self.transaction(|tx| async move {
@@ -270,39 +236,6 @@ impl Database {
.await
}
- /// Sets "accepted_tos_at" on the user to the given timestamp.
- pub async fn set_user_accepted_tos_at(
- &self,
- id: UserId,
- accepted_tos_at: Option<DateTime>,
- ) -> Result<()> {
- self.transaction(|tx| async move {
- user::Entity::update_many()
- .filter(user::Column::Id.eq(id))
- .set(user::ActiveModel {
- accepted_tos_at: ActiveValue::set(accepted_tos_at),
- ..Default::default()
- })
- .exec(&*tx)
- .await?;
- Ok(())
- })
- .await
- }
-
- /// hard delete the user.
- pub async fn destroy_user(&self, id: UserId) -> Result<()> {
- self.transaction(|tx| async move {
- access_token::Entity::delete_many()
- .filter(access_token::Column::UserId.eq(id))
- .exec(&*tx)
- .await?;
- user::Entity::delete_by_id(id).exec(&*tx).await?;
- Ok(())
- })
- .await
- }
-
/// Find users where github_login ILIKE name_query.
pub async fn fuzzy_search_users(&self, name_query: &str, limit: u32) -> Result<Vec<User>> {
self.transaction(|tx| async {
@@ -341,14 +274,4 @@ impl Database {
result.push('%');
result
}
-
- pub async fn get_users_missing_github_user_created_at(&self) -> Result<Vec<user::Model>> {
- self.transaction(|tx| async move {
- Ok(user::Entity::find()
- .filter(user::Column::GithubUserCreatedAt.is_null())
- .all(&*tx)
- .await?)
- })
- .await
- }
}
@@ -6,11 +6,8 @@ pub mod channel;
pub mod channel_buffer_collaborator;
pub mod channel_chat_participant;
pub mod channel_member;
-pub mod channel_message;
-pub mod channel_message_mention;
pub mod contact;
pub mod contributor;
-pub mod embedding;
pub mod extension;
pub mod extension_version;
pub mod follower;
@@ -18,7 +15,6 @@ pub mod language_server;
pub mod notification;
pub mod notification_kind;
pub mod observed_buffer_edits;
-pub mod observed_channel_messages;
pub mod project;
pub mod project_collaborator;
pub mod project_repository;
@@ -1,47 +0,0 @@
-use crate::db::{ChannelId, MessageId, UserId};
-use sea_orm::entity::prelude::*;
-use time::PrimitiveDateTime;
-
-#[derive(Clone, Debug, PartialEq, Eq, DeriveEntityModel)]
-#[sea_orm(table_name = "channel_messages")]
-pub struct Model {
- #[sea_orm(primary_key)]
- pub id: MessageId,
- pub channel_id: ChannelId,
- pub sender_id: UserId,
- pub body: String,
- pub sent_at: PrimitiveDateTime,
- pub edited_at: Option<PrimitiveDateTime>,
- pub nonce: Uuid,
- pub reply_to_message_id: Option<MessageId>,
-}
-
-impl ActiveModelBehavior for ActiveModel {}
-
-#[derive(Copy, Clone, Debug, EnumIter, DeriveRelation)]
-pub enum Relation {
- #[sea_orm(
- belongs_to = "super::channel::Entity",
- from = "Column::ChannelId",
- to = "super::channel::Column::Id"
- )]
- Channel,
- #[sea_orm(
- belongs_to = "super::user::Entity",
- from = "Column::SenderId",
- to = "super::user::Column::Id"
- )]
- Sender,
-}
-
-impl Related<super::channel::Entity> for Entity {
- fn to() -> RelationDef {
- Relation::Channel.def()
- }
-}
-
-impl Related<super::user::Entity> for Entity {
- fn to() -> RelationDef {
- Relation::Sender.def()
- }
-}
@@ -1,43 +0,0 @@
-use crate::db::{MessageId, UserId};
-use sea_orm::entity::prelude::*;
-
-#[derive(Clone, Debug, PartialEq, Eq, DeriveEntityModel)]
-#[sea_orm(table_name = "channel_message_mentions")]
-pub struct Model {
- #[sea_orm(primary_key)]
- pub message_id: MessageId,
- #[sea_orm(primary_key)]
- pub start_offset: i32,
- pub end_offset: i32,
- pub user_id: UserId,
-}
-
-impl ActiveModelBehavior for ActiveModel {}
-
-#[derive(Copy, Clone, Debug, EnumIter, DeriveRelation)]
-pub enum Relation {
- #[sea_orm(
- belongs_to = "super::channel_message::Entity",
- from = "Column::MessageId",
- to = "super::channel_message::Column::Id"
- )]
- Message,
- #[sea_orm(
- belongs_to = "super::user::Entity",
- from = "Column::UserId",
- to = "super::user::Column::Id"
- )]
- MentionedUser,
-}
-
-impl Related<super::channel::Entity> for Entity {
- fn to() -> RelationDef {
- Relation::Message.def()
- }
-}
-
-impl Related<super::user::Entity> for Entity {
- fn to() -> RelationDef {
- Relation::MentionedUser.def()
- }
-}
@@ -1,18 +0,0 @@
-use sea_orm::entity::prelude::*;
-use time::PrimitiveDateTime;
-
-#[derive(Clone, Debug, PartialEq, DeriveEntityModel)]
-#[sea_orm(table_name = "embeddings")]
-pub struct Model {
- #[sea_orm(primary_key)]
- pub model: String,
- #[sea_orm(primary_key)]
- pub digest: Vec<u8>,
- pub dimensions: Vec<f32>,
- pub retrieved_at: PrimitiveDateTime,
-}
-
-#[derive(Copy, Clone, Debug, EnumIter, DeriveRelation)]
-pub enum Relation {}
-
-impl ActiveModelBehavior for ActiveModel {}
@@ -1,41 +0,0 @@
-use crate::db::{ChannelId, MessageId, UserId};
-use sea_orm::entity::prelude::*;
-
-#[derive(Clone, Debug, PartialEq, Eq, DeriveEntityModel)]
-#[sea_orm(table_name = "observed_channel_messages")]
-pub struct Model {
- #[sea_orm(primary_key)]
- pub user_id: UserId,
- pub channel_id: ChannelId,
- pub channel_message_id: MessageId,
-}
-
-#[derive(Copy, Clone, Debug, EnumIter, DeriveRelation)]
-pub enum Relation {
- #[sea_orm(
- belongs_to = "super::channel::Entity",
- from = "Column::ChannelId",
- to = "super::channel::Column::Id"
- )]
- Channel,
- #[sea_orm(
- belongs_to = "super::user::Entity",
- from = "Column::UserId",
- to = "super::user::Column::Id"
- )]
- User,
-}
-
-impl Related<super::channel::Entity> for Entity {
- fn to() -> RelationDef {
- Relation::Channel.def()
- }
-}
-
-impl Related<super::user::Entity> for Entity {
- fn to() -> RelationDef {
- Relation::User.def()
- }
-}
-
-impl ActiveModelBehavior for ActiveModel {}
@@ -2,11 +2,7 @@ mod buffer_tests;
mod channel_tests;
mod contributor_tests;
mod db_tests;
-// we only run postgres tests on macos right now
-#[cfg(target_os = "macos")]
-mod embedding_tests;
mod extension_tests;
-mod user_tests;
use crate::migrations::run_database_migrations;
@@ -1,7 +1,7 @@
use super::*;
use crate::test_both_dbs;
use chrono::Utc;
-use pretty_assertions::{assert_eq, assert_ne};
+use pretty_assertions::assert_eq;
use std::sync::Arc;
test_both_dbs!(
@@ -457,53 +457,6 @@ async fn test_add_contacts(db: &Arc<Database>) {
);
}
-test_both_dbs!(
- test_metrics_id,
- test_metrics_id_postgres,
- test_metrics_id_sqlite
-);
-
-async fn test_metrics_id(db: &Arc<Database>) {
- let NewUserResult {
- user_id: user1,
- metrics_id: metrics_id1,
- ..
- } = db
- .create_user(
- "person1@example.com",
- None,
- false,
- NewUserParams {
- github_login: "person1".into(),
- github_user_id: 101,
- },
- )
- .await
- .unwrap();
- let NewUserResult {
- user_id: user2,
- metrics_id: metrics_id2,
- ..
- } = db
- .create_user(
- "person2@example.com",
- None,
- false,
- NewUserParams {
- github_login: "person2".into(),
- github_user_id: 102,
- },
- )
- .await
- .unwrap();
-
- assert_eq!(db.get_user_metrics_id(user1).await.unwrap(), metrics_id1);
- assert_eq!(db.get_user_metrics_id(user2).await.unwrap(), metrics_id2);
- assert_eq!(metrics_id1.len(), 36);
- assert_eq!(metrics_id2.len(), 36);
- assert_ne!(metrics_id1, metrics_id2);
-}
-
test_both_dbs!(
test_project_count,
test_project_count_postgres,
@@ -1,87 +0,0 @@
-use super::TestDb;
-use crate::db::embedding;
-use collections::HashMap;
-use sea_orm::{ColumnTrait, EntityTrait, QueryFilter, sea_query::Expr};
-use std::ops::Sub;
-use time::{Duration, OffsetDateTime, PrimitiveDateTime};
-
-// SQLite does not support array arguments, so we only test this against a real postgres instance
-#[gpui::test]
-async fn test_get_embeddings_postgres(cx: &mut gpui::TestAppContext) {
- let test_db = TestDb::postgres(cx.executor());
- let db = test_db.db();
-
- let provider = "test_model";
- let digest1 = vec![1, 2, 3];
- let digest2 = vec![4, 5, 6];
- let embeddings = HashMap::from_iter([
- (digest1.clone(), vec![0.1, 0.2, 0.3]),
- (digest2.clone(), vec![0.4, 0.5, 0.6]),
- ]);
-
- // Save embeddings
- db.save_embeddings(provider, &embeddings).await.unwrap();
-
- // Retrieve embeddings
- let retrieved_embeddings = db
- .get_embeddings(provider, &[digest1.clone(), digest2.clone()])
- .await
- .unwrap();
- assert_eq!(retrieved_embeddings.len(), 2);
- assert!(retrieved_embeddings.contains_key(&digest1));
- assert!(retrieved_embeddings.contains_key(&digest2));
-
- // Check if the retrieved embeddings are correct
- assert_eq!(retrieved_embeddings[&digest1], vec![0.1, 0.2, 0.3]);
- assert_eq!(retrieved_embeddings[&digest2], vec![0.4, 0.5, 0.6]);
-}
-
-#[gpui::test]
-async fn test_purge_old_embeddings(cx: &mut gpui::TestAppContext) {
- let test_db = TestDb::postgres(cx.executor());
- let db = test_db.db();
-
- let model = "test_model";
- let digest = vec![7, 8, 9];
- let embeddings = HashMap::from_iter([(digest.clone(), vec![0.7, 0.8, 0.9])]);
-
- // Save old embeddings
- db.save_embeddings(model, &embeddings).await.unwrap();
-
- // Reach into the DB and change the retrieved at to be > 60 days
- db.transaction(|tx| {
- let digest = digest.clone();
- async move {
- let sixty_days_ago = OffsetDateTime::now_utc().sub(Duration::days(61));
- let retrieved_at = PrimitiveDateTime::new(sixty_days_ago.date(), sixty_days_ago.time());
-
- embedding::Entity::update_many()
- .filter(
- embedding::Column::Model
- .eq(model)
- .and(embedding::Column::Digest.eq(digest)),
- )
- .col_expr(embedding::Column::RetrievedAt, Expr::value(retrieved_at))
- .exec(&*tx)
- .await
- .unwrap();
-
- Ok(())
- }
- })
- .await
- .unwrap();
-
- // Purge old embeddings
- db.purge_old_embeddings().await.unwrap();
-
- // Try to retrieve the purged embeddings
- let retrieved_embeddings = db
- .get_embeddings(model, std::slice::from_ref(&digest))
- .await
- .unwrap();
- assert!(
- retrieved_embeddings.is_empty(),
- "Old embeddings should have been purged"
- );
-}
@@ -1,96 +0,0 @@
-use chrono::Utc;
-
-use crate::{
- db::{Database, NewUserParams},
- test_both_dbs,
-};
-use std::sync::Arc;
-
-test_both_dbs!(
- test_accepted_tos,
- test_accepted_tos_postgres,
- test_accepted_tos_sqlite
-);
-
-async fn test_accepted_tos(db: &Arc<Database>) {
- let user_id = db
- .create_user(
- "user1@example.com",
- None,
- false,
- NewUserParams {
- github_login: "user1".to_string(),
- github_user_id: 1,
- },
- )
- .await
- .unwrap()
- .user_id;
-
- let user = db.get_user_by_id(user_id).await.unwrap().unwrap();
- assert!(user.accepted_tos_at.is_none());
-
- let accepted_tos_at = Utc::now().naive_utc();
- db.set_user_accepted_tos_at(user_id, Some(accepted_tos_at))
- .await
- .unwrap();
-
- let user = db.get_user_by_id(user_id).await.unwrap().unwrap();
- assert!(user.accepted_tos_at.is_some());
- assert_eq!(user.accepted_tos_at, Some(accepted_tos_at));
-
- db.set_user_accepted_tos_at(user_id, None).await.unwrap();
-
- let user = db.get_user_by_id(user_id).await.unwrap().unwrap();
- assert!(user.accepted_tos_at.is_none());
-}
-
-test_both_dbs!(
- test_destroy_user_cascade_deletes_access_tokens,
- test_destroy_user_cascade_deletes_access_tokens_postgres,
- test_destroy_user_cascade_deletes_access_tokens_sqlite
-);
-
-async fn test_destroy_user_cascade_deletes_access_tokens(db: &Arc<Database>) {
- let user_id = db
- .create_user(
- "user1@example.com",
- Some("user1"),
- false,
- NewUserParams {
- github_login: "user1".to_string(),
- github_user_id: 12345,
- },
- )
- .await
- .unwrap()
- .user_id;
-
- let user = db.get_user_by_id(user_id).await.unwrap();
- assert!(user.is_some());
-
- let token_1_id = db
- .create_access_token(user_id, None, "token-1", 10)
- .await
- .unwrap();
-
- let token_2_id = db
- .create_access_token(user_id, None, "token-2", 10)
- .await
- .unwrap();
-
- let token_1 = db.get_access_token(token_1_id).await;
- let token_2 = db.get_access_token(token_2_id).await;
- assert!(token_1.is_ok());
- assert!(token_2.is_ok());
-
- db.destroy_user(user_id).await.unwrap();
-
- let user = db.get_user_by_id(user_id).await.unwrap();
- assert!(user.is_none());
-
- let token_1 = db.get_access_token(token_1_id).await;
- let token_2 = db.get_access_token(token_2_id).await;
- assert!(token_1.is_err());
- assert!(token_2.is_err());
-}
@@ -13,7 +13,7 @@ use collab::llm::db::LlmDatabase;
use collab::migrations::run_database_migrations;
use collab::{
AppState, Config, Result, api::fetch_extensions_from_blob_store_periodically, db, env,
- executor::Executor, rpc::ResultExt,
+ executor::Executor,
};
use db::Database;
use std::{
@@ -95,8 +95,6 @@ async fn main() -> Result<()> {
let state = AppState::new(config, Executor::Production).await?;
if mode.is_collab() {
- state.db.purge_old_embeddings().await.trace_err();
-
let epoch = state
.db
.create_server(&state.config.zed_environment)
@@ -7065,7 +7065,7 @@ async fn test_remote_git_branches(
// Also try creating a new branch
cx_b.update(|cx| {
repo_b.update(cx, |repository, _cx| {
- repository.create_branch("totally-new-branch".to_string())
+ repository.create_branch("totally-new-branch".to_string(), None)
})
})
.await
@@ -326,7 +326,7 @@ async fn test_ssh_collaboration_git_branches(
// Also try creating a new branch
cx_b.update(|cx| {
repo_b.update(cx, |repo_b, _cx| {
- repo_b.create_branch("totally-new-branch".to_string())
+ repo_b.create_branch("totally-new-branch".to_string(), None)
})
})
.await
@@ -564,6 +564,20 @@ impl ProjectDiagnosticsEditor {
blocks.extend(more);
}
+ let cmp_excerpts = |buffer_snapshot: &BufferSnapshot,
+ a: &ExcerptRange<text::Anchor>,
+ b: &ExcerptRange<text::Anchor>| {
+ let context_start = || a.context.start.cmp(&b.context.start, buffer_snapshot);
+ let context_end = || a.context.end.cmp(&b.context.end, buffer_snapshot);
+ let primary_start = || a.primary.start.cmp(&b.primary.start, buffer_snapshot);
+ let primary_end = || a.primary.end.cmp(&b.primary.end, buffer_snapshot);
+ context_start()
+ .then_with(context_end)
+ .then_with(primary_start)
+ .then_with(primary_end)
+ .then(cmp::Ordering::Greater)
+ };
+
let mut excerpt_ranges: Vec<ExcerptRange<_>> = this.update(cx, |this, cx| {
this.multibuffer.update(cx, |multi_buffer, cx| {
let is_dirty = multi_buffer
@@ -575,10 +589,12 @@ impl ProjectDiagnosticsEditor {
.excerpts_for_buffer(buffer_id, cx)
.into_iter()
.map(|(_, range)| range)
+ .sorted_by(|a, b| cmp_excerpts(&buffer_snapshot, a, b))
.collect(),
}
})
})?;
+
let mut result_blocks = vec![None; excerpt_ranges.len()];
let context_lines = cx.update(|_, cx| multibuffer_context_lines(cx))?;
for b in blocks {
@@ -592,40 +608,14 @@ impl ProjectDiagnosticsEditor {
buffer_snapshot = cx.update(|_, cx| buffer.read(cx).snapshot())?;
let initial_range = buffer_snapshot.anchor_after(b.initial_range.start)
..buffer_snapshot.anchor_before(b.initial_range.end);
-
- let bin_search = |probe: &ExcerptRange<text::Anchor>| {
- let context_start = || {
- probe
- .context
- .start
- .cmp(&excerpt_range.start, &buffer_snapshot)
- };
- let context_end =
- || probe.context.end.cmp(&excerpt_range.end, &buffer_snapshot);
- let primary_start = || {
- probe
- .primary
- .start
- .cmp(&initial_range.start, &buffer_snapshot)
- };
- let primary_end =
- || probe.primary.end.cmp(&initial_range.end, &buffer_snapshot);
- context_start()
- .then_with(context_end)
- .then_with(primary_start)
- .then_with(primary_end)
- .then(cmp::Ordering::Greater)
+ let excerpt_range = ExcerptRange {
+ context: excerpt_range,
+ primary: initial_range,
};
let i = excerpt_ranges
- .binary_search_by(bin_search)
+ .binary_search_by(|probe| cmp_excerpts(&buffer_snapshot, probe, &excerpt_range))
.unwrap_or_else(|i| i);
- excerpt_ranges.insert(
- i,
- ExcerptRange {
- context: excerpt_range,
- primary: initial_range,
- },
- );
+ excerpt_ranges.insert(i, excerpt_range);
result_blocks.insert(i, Some(b));
}
@@ -128,20 +128,21 @@ impl Render for EditPredictionButton {
}),
);
}
- let this = cx.entity();
+ let this = cx.weak_entity();
div().child(
PopoverMenu::new("copilot")
.menu(move |window, cx| {
let current_status = Copilot::global(cx)?.read(cx).status();
- Some(match current_status {
+ match current_status {
Status::Authorized => this.update(cx, |this, cx| {
this.build_copilot_context_menu(window, cx)
}),
_ => this.update(cx, |this, cx| {
this.build_copilot_start_menu(window, cx)
}),
- })
+ }
+ .ok()
})
.anchor(Corner::BottomRight)
.trigger_with_tooltip(
@@ -182,7 +183,7 @@ impl Render for EditPredictionButton {
let icon = status.to_icon();
let tooltip_text = status.to_tooltip();
let has_menu = status.has_menu();
- let this = cx.entity();
+ let this = cx.weak_entity();
let fs = self.fs.clone();
div().child(
@@ -209,9 +210,11 @@ impl Render for EditPredictionButton {
)
}))
}
- SupermavenButtonStatus::Ready => Some(this.update(cx, |this, cx| {
- this.build_supermaven_context_menu(window, cx)
- })),
+ SupermavenButtonStatus::Ready => this
+ .update(cx, |this, cx| {
+ this.build_supermaven_context_menu(window, cx)
+ })
+ .ok(),
_ => None,
})
.anchor(Corner::BottomRight)
@@ -233,15 +236,16 @@ impl Render for EditPredictionButton {
let enabled = self.editor_enabled.unwrap_or(true);
let has_api_key = CodestralCompletionProvider::has_api_key(cx);
let fs = self.fs.clone();
- let this = cx.entity();
+ let this = cx.weak_entity();
div().child(
PopoverMenu::new("codestral")
.menu(move |window, cx| {
if has_api_key {
- Some(this.update(cx, |this, cx| {
+ this.update(cx, |this, cx| {
this.build_codestral_context_menu(window, cx)
- }))
+ })
+ .ok()
} else {
Some(ContextMenu::build(window, cx, |menu, _, _| {
let fs = fs.clone();
@@ -379,11 +383,12 @@ impl Render for EditPredictionButton {
})
});
- let this = cx.entity();
+ let this = cx.weak_entity();
let mut popover_menu = PopoverMenu::new("zeta")
.menu(move |window, cx| {
- Some(this.update(cx, |this, cx| this.build_zeta_context_menu(window, cx)))
+ this.update(cx, |this, cx| this.build_zeta_context_menu(window, cx))
+ .ok()
})
.anchor(Corner::BottomRight)
.with_handle(self.popover_menu_handle.clone());
@@ -213,15 +213,6 @@ pub struct ExpandExcerptsDown {
pub(super) lines: u32,
}
-/// Shows code completion suggestions at the cursor position.
-#[derive(PartialEq, Clone, Deserialize, Default, JsonSchema, Action)]
-#[action(namespace = editor)]
-#[serde(deny_unknown_fields)]
-pub struct ShowCompletions {
- #[serde(default)]
- pub(super) trigger: Option<String>,
-}
-
/// Handles text input in the editor.
#[derive(PartialEq, Clone, Deserialize, Default, JsonSchema, Action)]
#[action(namespace = editor)]
@@ -736,6 +727,8 @@ actions!(
SelectToStartOfParagraph,
/// Extends selection up.
SelectUp,
+ /// Shows code completion suggestions at the cursor position.
+ ShowCompletions,
/// Shows the system character palette.
ShowCharacterPalette,
/// Shows edit prediction at cursor.
@@ -252,8 +252,17 @@ enum MarkdownCacheKey {
#[derive(Copy, Clone, Debug, PartialEq, Eq)]
pub enum CompletionsMenuSource {
+ /// Show all completions (words, snippets, LSP)
Normal,
+ /// Show only snippets (not words or LSP)
+ ///
+ /// Used after typing a non-word character
+ SnippetsOnly,
+ /// Tab stops within a snippet that have a predefined finite set of choices
SnippetChoices,
+ /// Show only words (not snippets or LSP)
+ ///
+ /// Used when word completions are explicitly triggered
Words { ignore_threshold: bool },
}
@@ -19,7 +19,7 @@ use std::{
cell::RefCell,
cmp::{self, Ordering},
fmt::Debug,
- ops::{Deref, DerefMut, Range, RangeBounds, RangeInclusive},
+ ops::{Deref, DerefMut, Not, Range, RangeBounds, RangeInclusive},
sync::{
Arc,
atomic::{AtomicUsize, Ordering::SeqCst},
@@ -1879,18 +1879,14 @@ impl Iterator for BlockRows<'_> {
}
let transform = self.transforms.item()?;
- if let Some(block) = transform.block.as_ref() {
- if block.is_replacement() && self.transforms.start().0 == self.output_row {
- if matches!(block, Block::FoldedBuffer { .. }) {
- Some(RowInfo::default())
- } else {
- Some(self.input_rows.next().unwrap())
- }
- } else {
- Some(RowInfo::default())
- }
+ if transform.block.as_ref().is_none_or(|block| {
+ block.is_replacement()
+ && self.transforms.start().0 == self.output_row
+ && matches!(block, Block::FoldedBuffer { .. }).not()
+ }) {
+ self.input_rows.next()
} else {
- Some(self.input_rows.next().unwrap())
+ Some(RowInfo::default())
}
}
}
@@ -965,7 +965,7 @@ impl<'a> Iterator for WrapChunks<'a> {
}
if self.input_chunk.text.is_empty() {
- self.input_chunk = self.input_chunks.next().unwrap();
+ self.input_chunk = self.input_chunks.next()?;
}
let mut input_len = 0;
@@ -75,7 +75,7 @@ use ::git::{
blame::{BlameEntry, ParsedCommitMessage},
status::FileStatus,
};
-use aho_corasick::AhoCorasick;
+use aho_corasick::{AhoCorasick, AhoCorasickBuilder, BuildError};
use anyhow::{Context as _, Result, anyhow};
use blink_manager::BlinkManager;
use buffer_diff::DiffHunkStatus;
@@ -118,7 +118,7 @@ use language::{
AutoindentMode, BlockCommentConfig, BracketMatch, BracketPair, Buffer, BufferRow,
BufferSnapshot, Capability, CharClassifier, CharKind, CharScopeContext, CodeLabel, CursorShape,
DiagnosticEntryRef, DiffOptions, EditPredictionsMode, EditPreview, HighlightedText, IndentKind,
- IndentSize, Language, LanguageName, OffsetRangeExt, Point, Runnable, RunnableRange, Selection,
+ IndentSize, Language, LanguageName, OffsetRangeExt, OutlineItem, Point, Runnable, RunnableRange, Selection,
SelectionGoal, TextObject, TransactionId, TreeSitterOptions, WordsQuery,
language_settings::{
self, LanguageSettings, LspInsertMode, RewrapBehavior, WordsCompletionMode,
@@ -1185,12 +1185,14 @@ pub struct Editor {
hide_mouse_mode: HideMouseMode,
pub change_list: ChangeList,
inline_value_cache: InlineValueCache,
+
selection_drag_state: SelectionDragState,
colors: Option<LspColorData>,
post_scroll_update: Task<()>,
refresh_colors_task: Task<()>,
inlay_hints: Option<LspInlayHintData>,
folding_newlines: Task<()>,
+ select_next_is_case_sensitive: Option<bool>,
pub lookup_key: Option<Box<dyn Any + Send + Sync>>,
applicable_language_settings: HashMap<Option<LanguageName>, LanguageSettings>,
fetched_tree_sitter_chunks: HashMap<ExcerptId, HashSet<Range<BufferRow>>>,
@@ -1768,6 +1770,51 @@ impl Editor {
Editor::new_internal(mode, buffer, project, None, window, cx)
}
+ pub fn sticky_headers(&self, cx: &App) -> Option<Vec<OutlineItem<Anchor>>> {
+ let multi_buffer = self.buffer().read(cx);
+ let multi_buffer_snapshot = multi_buffer.snapshot(cx);
+ let multi_buffer_visible_start = self
+ .scroll_manager
+ .anchor()
+ .anchor
+ .to_point(&multi_buffer_snapshot);
+ let max_row = multi_buffer_snapshot.max_point().row;
+
+ let start_row = (multi_buffer_visible_start.row).min(max_row);
+ let end_row = (multi_buffer_visible_start.row + 10).min(max_row);
+
+ if let Some((excerpt_id, buffer_id, buffer)) = multi_buffer.read(cx).as_singleton() {
+ let outline_items = buffer
+ .outline_items_containing(
+ Point::new(start_row, 0)..Point::new(end_row, 0),
+ true,
+ self.style().map(|style| style.syntax.as_ref()),
+ )
+ .into_iter()
+ .map(|outline_item| OutlineItem {
+ depth: outline_item.depth,
+ range: Anchor::range_in_buffer(*excerpt_id, buffer_id, outline_item.range),
+ source_range_for_text: Anchor::range_in_buffer(
+ *excerpt_id,
+ buffer_id,
+ outline_item.source_range_for_text,
+ ),
+ text: outline_item.text,
+ highlight_ranges: outline_item.highlight_ranges,
+ name_ranges: outline_item.name_ranges,
+ body_range: outline_item
+ .body_range
+ .map(|range| Anchor::range_in_buffer(*excerpt_id, buffer_id, range)),
+ annotation_range: outline_item
+ .annotation_range
+ .map(|range| Anchor::range_in_buffer(*excerpt_id, buffer_id, range)),
+ });
+ return Some(outline_items.collect());
+ }
+
+ None
+ }
+
fn new_internal(
mode: EditorMode,
multi_buffer: Entity<MultiBuffer>,
@@ -2291,6 +2338,7 @@ impl Editor {
selection_drag_state: SelectionDragState::None,
folding_newlines: Task::ready(()),
lookup_key: None,
+ select_next_is_case_sensitive: None,
applicable_language_settings: HashMap::default(),
fetched_tree_sitter_chunks: HashMap::default(),
};
@@ -3238,7 +3286,7 @@ impl Editor {
};
if continue_showing {
- self.show_completions(&ShowCompletions { trigger: None }, window, cx);
+ self.open_or_update_completions_menu(None, None, false, window, cx);
} else {
self.hide_context_menu(window, cx);
}
@@ -3423,6 +3471,21 @@ impl Editor {
Subscription::join(other_subscription, this_subscription)
}
+ fn unfold_buffers_with_selections(&mut self, cx: &mut Context<Self>) {
+ if self.buffer().read(cx).is_singleton() {
+ return;
+ }
+ let snapshot = self.buffer.read(cx).snapshot(cx);
+ let buffer_ids: HashSet<BufferId> = self
+ .selections
+ .disjoint_anchor_ranges()
+ .flat_map(|range| snapshot.buffer_ids_for_range(range))
+ .collect();
+ for buffer_id in buffer_ids {
+ self.unfold_buffer(buffer_id, cx);
+ }
+ }
+
/// Changes selections using the provided mutation function. Changes to `self.selections` occur
/// immediately, but when run within `transact` or `with_selection_effects_deferred` other
/// effects of selection change occur at the end of the transaction.
@@ -4035,17 +4098,24 @@ impl Editor {
self.selection_mark_mode = false;
self.selection_drag_state = SelectionDragState::None;
+ if self.dismiss_menus_and_popups(true, window, cx) {
+ cx.notify();
+ return;
+ }
if self.clear_expanded_diff_hunks(cx) {
cx.notify();
return;
}
- if self.dismiss_menus_and_popups(true, window, cx) {
+ if self.show_git_blame_gutter {
+ self.show_git_blame_gutter = false;
+ cx.notify();
return;
}
if self.mode.is_full()
&& self.change_selections(Default::default(), window, cx, |s| s.try_cancel())
{
+ cx.notify();
return;
}
@@ -4164,6 +4234,8 @@ impl Editor {
self.hide_mouse_cursor(HideMouseCursorOrigin::TypingAction, cx);
+ self.unfold_buffers_with_selections(cx);
+
let selections = self.selections.all_adjusted(&self.display_snapshot(cx));
let mut bracket_inserted = false;
let mut edits = Vec::new();
@@ -5073,57 +5145,18 @@ impl Editor {
ignore_threshold: false,
}),
None,
- window,
- cx,
- );
- }
- Some(CompletionsMenuSource::Normal)
- | Some(CompletionsMenuSource::SnippetChoices)
- | None
- if self.is_completion_trigger(
- text,
trigger_in_words,
- completions_source.is_some(),
- cx,
- ) =>
- {
- self.show_completions(
- &ShowCompletions {
- trigger: Some(text.to_owned()).filter(|x| !x.is_empty()),
- },
window,
cx,
- )
- }
- _ => {
- self.hide_context_menu(window, cx);
+ );
}
- }
- }
-
- fn is_completion_trigger(
- &self,
- text: &str,
- trigger_in_words: bool,
- menu_is_open: bool,
- cx: &mut Context<Self>,
- ) -> bool {
- let position = self.selections.newest_anchor().head();
- let Some(buffer) = self.buffer.read(cx).buffer_for_anchor(position, cx) else {
- return false;
- };
-
- if let Some(completion_provider) = &self.completion_provider {
- completion_provider.is_completion_trigger(
- &buffer,
- position.text_anchor,
- text,
- trigger_in_words,
- menu_is_open,
+ _ => self.open_or_update_completions_menu(
+ None,
+ Some(text.to_owned()).filter(|x| !x.is_empty()),
+ true,
+ window,
cx,
- )
- } else {
- false
+ ),
}
}
@@ -5401,6 +5434,7 @@ impl Editor {
ignore_threshold: true,
}),
None,
+ false,
window,
cx,
);
@@ -5408,17 +5442,18 @@ impl Editor {
pub fn show_completions(
&mut self,
- options: &ShowCompletions,
+ _: &ShowCompletions,
window: &mut Window,
cx: &mut Context<Self>,
) {
- self.open_or_update_completions_menu(None, options.trigger.as_deref(), window, cx);
+ self.open_or_update_completions_menu(None, None, false, window, cx);
}
fn open_or_update_completions_menu(
&mut self,
requested_source: Option<CompletionsMenuSource>,
- trigger: Option<&str>,
+ trigger: Option<String>,
+ trigger_in_words: bool,
window: &mut Window,
cx: &mut Context<Self>,
) {
@@ -5426,6 +5461,15 @@ impl Editor {
return;
}
+ let completions_source = self
+ .context_menu
+ .borrow()
+ .as_ref()
+ .and_then(|menu| match menu {
+ CodeContextMenu::Completions(completions_menu) => Some(completions_menu.source),
+ CodeContextMenu::CodeActions(_) => None,
+ });
+
let multibuffer_snapshot = self.buffer.read(cx).read(cx);
// Typically `start` == `end`, but with snippet tabstop choices the default choice is
@@ -5473,7 +5517,8 @@ impl Editor {
ignore_word_threshold = ignore_threshold;
None
}
- Some(CompletionsMenuSource::SnippetChoices) => {
+ Some(CompletionsMenuSource::SnippetChoices)
+ | Some(CompletionsMenuSource::SnippetsOnly) => {
log::error!("bug: SnippetChoices requested_source is not handled");
None
}
@@ -5487,13 +5532,19 @@ impl Editor {
.as_ref()
.is_none_or(|provider| provider.filter_completions());
+ let was_snippets_only = matches!(
+ completions_source,
+ Some(CompletionsMenuSource::SnippetsOnly)
+ );
+
if let Some(CodeContextMenu::Completions(menu)) = self.context_menu.borrow_mut().as_mut() {
if filter_completions {
menu.filter(query.clone(), provider.clone(), window, cx);
}
// When `is_incomplete` is false, no need to re-query completions when the current query
// is a suffix of the initial query.
- if !menu.is_incomplete {
+ let was_complete = !menu.is_incomplete;
+ if was_complete && !was_snippets_only {
// If the new query is a suffix of the old query (typing more characters) and
// the previous result was complete, the existing completions can be filtered.
//
@@ -5517,23 +5568,6 @@ impl Editor {
}
};
- let trigger_kind = match trigger {
- Some(trigger) if buffer.read(cx).completion_triggers().contains(trigger) => {
- CompletionTriggerKind::TRIGGER_CHARACTER
- }
- _ => CompletionTriggerKind::INVOKED,
- };
- let completion_context = CompletionContext {
- trigger_character: trigger.and_then(|trigger| {
- if trigger_kind == CompletionTriggerKind::TRIGGER_CHARACTER {
- Some(String::from(trigger))
- } else {
- None
- }
- }),
- trigger_kind,
- };
-
let Anchor {
excerpt_id: buffer_excerpt_id,
text_anchor: buffer_position,
@@ -5586,54 +5620,88 @@ impl Editor {
.as_ref()
.is_none_or(|query| !query.chars().any(|c| c.is_digit(10)));
- let omit_word_completions = !self.word_completions_enabled
- || (!ignore_word_threshold
- && match &query {
- Some(query) => query.chars().count() < completion_settings.words_min_length,
- None => completion_settings.words_min_length != 0,
- });
-
- let (mut words, provider_responses) = match &provider {
- Some(provider) => {
- let provider_responses = provider.completions(
- buffer_excerpt_id,
+ let load_provider_completions = provider.as_ref().is_some_and(|provider| {
+ trigger.as_ref().is_none_or(|trigger| {
+ provider.is_completion_trigger(
&buffer,
- buffer_position,
- completion_context,
- window,
+ position.text_anchor,
+ trigger,
+ trigger_in_words,
+ completions_source.is_some(),
cx,
- );
+ )
+ })
+ });
- let words = match (omit_word_completions, completion_settings.words) {
- (true, _) | (_, WordsCompletionMode::Disabled) => {
- Task::ready(BTreeMap::default())
- }
- (false, WordsCompletionMode::Enabled | WordsCompletionMode::Fallback) => cx
- .background_spawn(async move {
- buffer_snapshot.words_in_range(WordsQuery {
- fuzzy_contents: None,
- range: word_search_range,
- skip_digits,
- })
- }),
- };
+ let provider_responses = if let Some(provider) = &provider
+ && load_provider_completions
+ {
+ let trigger_character =
+ trigger.filter(|trigger| buffer.read(cx).completion_triggers().contains(trigger));
+ let completion_context = CompletionContext {
+ trigger_kind: match &trigger_character {
+ Some(_) => CompletionTriggerKind::TRIGGER_CHARACTER,
+ None => CompletionTriggerKind::INVOKED,
+ },
+ trigger_character,
+ };
- (words, provider_responses)
- }
- None => {
- let words = if omit_word_completions {
- Task::ready(BTreeMap::default())
- } else {
- cx.background_spawn(async move {
- buffer_snapshot.words_in_range(WordsQuery {
- fuzzy_contents: None,
- range: word_search_range,
- skip_digits,
- })
- })
- };
- (words, Task::ready(Ok(Vec::new())))
- }
+ provider.completions(
+ buffer_excerpt_id,
+ &buffer,
+ buffer_position,
+ completion_context,
+ window,
+ cx,
+ )
+ } else {
+ Task::ready(Ok(Vec::new()))
+ };
+
+ let load_word_completions = if !self.word_completions_enabled {
+ false
+ } else if requested_source
+ == Some(CompletionsMenuSource::Words {
+ ignore_threshold: true,
+ })
+ {
+ true
+ } else {
+ load_provider_completions
+ && completion_settings.words != WordsCompletionMode::Disabled
+ && (ignore_word_threshold || {
+ let words_min_length = completion_settings.words_min_length;
+ // check whether word has at least `words_min_length` characters
+ let query_chars = query.iter().flat_map(|q| q.chars());
+ query_chars.take(words_min_length).count() == words_min_length
+ })
+ };
+
+ let mut words = if load_word_completions {
+ cx.background_spawn(async move {
+ buffer_snapshot.words_in_range(WordsQuery {
+ fuzzy_contents: None,
+ range: word_search_range,
+ skip_digits,
+ })
+ })
+ } else {
+ Task::ready(BTreeMap::default())
+ };
+
+ let snippets = if let Some(provider) = &provider
+ && provider.show_snippets()
+ && let Some(project) = self.project()
+ {
+ project.update(cx, |project, cx| {
+ snippet_completions(project, &buffer, buffer_position, cx)
+ })
+ } else {
+ Task::ready(Ok(CompletionResponse {
+ completions: Vec::new(),
+ display_options: Default::default(),
+ is_incomplete: false,
+ }))
};
let snippet_sort_order = EditorSettings::get_global(cx).snippet_sort_order;
@@ -5691,6 +5759,13 @@ impl Editor {
confirm: None,
}));
+ completions.extend(
+ snippets
+ .await
+ .into_iter()
+ .flat_map(|response| response.completions),
+ );
+
let menu = if completions.is_empty() {
None
} else {
@@ -5702,7 +5777,11 @@ impl Editor {
.map(|workspace| workspace.read(cx).app_state().languages.clone());
let menu = CompletionsMenu::new(
id,
- requested_source.unwrap_or(CompletionsMenuSource::Normal),
+ requested_source.unwrap_or(if load_provider_completions {
+ CompletionsMenuSource::Normal
+ } else {
+ CompletionsMenuSource::SnippetsOnly
+ }),
sort_completions,
show_completion_documentation,
position,
@@ -6032,7 +6111,7 @@ impl Editor {
.as_ref()
.is_some_and(|confirm| confirm(intent, window, cx));
if show_new_completions_on_confirm {
- self.show_completions(&ShowCompletions { trigger: None }, window, cx);
+ self.open_or_update_completions_menu(None, None, false, window, cx);
}
let provider = self.completion_provider.as_ref()?;
@@ -12828,6 +12907,10 @@ impl Editor {
});
}
+ // π€ | .. | show_in_menu |
+ // | .. | true true
+ // | had_edit_prediction | false true
+
let trigger_in_words =
this.show_edit_predictions_in_menu() || !had_active_edit_prediction;
@@ -14610,7 +14693,7 @@ impl Editor {
.collect::<String>();
let is_empty = query.is_empty();
let select_state = SelectNextState {
- query: AhoCorasick::new(&[query])?,
+ query: self.build_query(&[query], cx)?,
wordwise: true,
done: is_empty,
};
@@ -14620,7 +14703,7 @@ impl Editor {
}
} else if let Some(selected_text) = selected_text {
self.select_next_state = Some(SelectNextState {
- query: AhoCorasick::new(&[selected_text])?,
+ query: self.build_query(&[selected_text], cx)?,
wordwise: false,
done: false,
});
@@ -14828,7 +14911,7 @@ impl Editor {
.collect::<String>();
let is_empty = query.is_empty();
let select_state = SelectNextState {
- query: AhoCorasick::new(&[query.chars().rev().collect::<String>()])?,
+ query: self.build_query(&[query.chars().rev().collect::<String>()], cx)?,
wordwise: true,
done: is_empty,
};
@@ -14838,7 +14921,8 @@ impl Editor {
}
} else if let Some(selected_text) = selected_text {
self.select_prev_state = Some(SelectNextState {
- query: AhoCorasick::new(&[selected_text.chars().rev().collect::<String>()])?,
+ query: self
+ .build_query(&[selected_text.chars().rev().collect::<String>()], cx)?,
wordwise: false,
done: false,
});
@@ -14848,6 +14932,25 @@ impl Editor {
Ok(())
}
+ /// Builds an `AhoCorasick` automaton from the provided patterns, while
+ /// setting the case sensitivity based on the global
+ /// `SelectNextCaseSensitive` setting, if set, otherwise based on the
+ /// editor's settings.
+ fn build_query<I, P>(&self, patterns: I, cx: &Context<Self>) -> Result<AhoCorasick, BuildError>
+ where
+ I: IntoIterator<Item = P>,
+ P: AsRef<[u8]>,
+ {
+ let case_sensitive = self.select_next_is_case_sensitive.map_or_else(
+ || EditorSettings::get_global(cx).search.case_sensitive,
+ |value| value,
+ );
+
+ let mut builder = AhoCorasickBuilder::new();
+ builder.ascii_case_insensitive(!case_sensitive);
+ builder.build(patterns)
+ }
+
pub fn find_next_match(
&mut self,
_: &FindNextMatch,
@@ -18822,10 +18925,17 @@ impl Editor {
if self.buffer().read(cx).is_singleton() || self.is_buffer_folded(buffer_id, cx) {
return;
}
+
let folded_excerpts = self.buffer().read(cx).excerpts_for_buffer(buffer_id, cx);
self.display_map.update(cx, |display_map, cx| {
display_map.fold_buffers([buffer_id], cx)
});
+
+ let snapshot = self.display_snapshot(cx);
+ self.selections.change_with(&snapshot, |selections| {
+ selections.remove_selections_from_buffer(buffer_id);
+ });
+
cx.emit(EditorEvent::BufferFoldToggled {
ids: folded_excerpts.iter().map(|&(id, _)| id).collect(),
folded: true,
@@ -23082,6 +23192,10 @@ pub trait CompletionProvider {
fn filter_completions(&self) -> bool {
true
}
+
+ fn show_snippets(&self) -> bool {
+ false
+ }
}
pub trait CodeActionProvider {
@@ -23342,16 +23456,8 @@ impl CompletionProvider for Entity<Project> {
cx: &mut Context<Editor>,
) -> Task<Result<Vec<CompletionResponse>>> {
self.update(cx, |project, cx| {
- let snippets = snippet_completions(project, buffer, buffer_position, cx);
- let project_completions = project.completions(buffer, buffer_position, options, cx);
- cx.background_spawn(async move {
- let mut responses = project_completions.await?;
- let snippets = snippets.await?;
- if !snippets.completions.is_empty() {
- responses.push(snippets);
- }
- Ok(responses)
- })
+ let task = project.completions(buffer, buffer_position, options, cx);
+ cx.background_spawn(task)
})
}
@@ -23423,6 +23529,10 @@ impl CompletionProvider for Entity<Project> {
buffer.completion_triggers().contains(text)
}
+
+ fn show_snippets(&self) -> bool {
+ true
+ }
}
impl SemanticsProvider for Entity<Project> {
@@ -33,6 +33,7 @@ pub struct EditorSettings {
pub horizontal_scroll_margin: f32,
pub scroll_sensitivity: f32,
pub fast_scroll_sensitivity: f32,
+ pub sticky_scroll: StickyScroll,
pub relative_line_numbers: RelativeLineNumbers,
pub seed_search_query_from_cursor: SeedQuerySetting,
pub use_smartcase_search: bool,
@@ -65,6 +66,11 @@ pub struct Jupyter {
pub enabled: bool,
}
+#[derive(Copy, Clone, Debug, PartialEq, Eq)]
+pub struct StickyScroll {
+ pub enabled: bool,
+}
+
#[derive(Clone, Debug, PartialEq, Eq)]
pub struct Toolbar {
pub breadcrumbs: bool,
@@ -156,10 +162,15 @@ pub struct DragAndDropSelection {
pub struct SearchSettings {
/// Whether to show the project search button in the status bar.
pub button: bool,
+ /// Whether to only match on whole words.
pub whole_word: bool,
+ /// Whether to match case sensitively.
pub case_sensitive: bool,
+ /// Whether to include gitignored files in search results.
pub include_ignored: bool,
+ /// Whether to interpret the search query as a regular expression.
pub regex: bool,
+ /// Whether to center the cursor on each search match when navigating.
pub center_on_match: bool,
}
@@ -185,6 +196,7 @@ impl Settings for EditorSettings {
let toolbar = editor.toolbar.unwrap();
let search = editor.search.unwrap();
let drag_and_drop_selection = editor.drag_and_drop_selection.unwrap();
+ let sticky_scroll = editor.sticky_scroll.unwrap();
Self {
cursor_blink: editor.cursor_blink.unwrap(),
cursor_shape: editor.cursor_shape.map(Into::into),
@@ -235,6 +247,9 @@ impl Settings for EditorSettings {
horizontal_scroll_margin: editor.horizontal_scroll_margin.unwrap(),
scroll_sensitivity: editor.scroll_sensitivity.unwrap(),
fast_scroll_sensitivity: editor.fast_scroll_sensitivity.unwrap(),
+ sticky_scroll: StickyScroll {
+ enabled: sticky_scroll.enabled.unwrap(),
+ },
relative_line_numbers: editor.relative_line_numbers.unwrap(),
seed_search_query_from_cursor: editor.seed_search_query_from_cursor.unwrap(),
use_smartcase_search: editor.use_smartcase_search.unwrap(),
@@ -3,6 +3,7 @@ use crate::{
JoinLines,
code_context_menus::CodeContextMenu,
edit_prediction_tests::FakeEditPredictionProvider,
+ element::StickyHeader,
linked_editing_ranges::LinkedEditingRanges,
scroll::scroll_amount::ScrollAmount,
test::{
@@ -43,8 +44,8 @@ use project::{
};
use serde_json::{self, json};
use settings::{
- AllLanguageSettingsContent, IndentGuideBackgroundColoring, IndentGuideColoring,
- ProjectSettingsContent,
+ AllLanguageSettingsContent, EditorSettingsContent, IndentGuideBackgroundColoring,
+ IndentGuideColoring, ProjectSettingsContent, SearchSettingsContent,
};
use std::{cell::RefCell, future::Future, rc::Rc, sync::atomic::AtomicBool, time::Instant};
use std::{
@@ -8313,8 +8314,15 @@ async fn test_add_selection_above_below_multi_cursor_existing_state(cx: &mut Tes
#[gpui::test]
async fn test_select_next(cx: &mut TestAppContext) {
init_test(cx, |_| {});
-
let mut cx = EditorTestContext::new(cx).await;
+
+ // Enable case sensitive search.
+ update_test_editor_settings(&mut cx, |settings| {
+ let mut search_settings = SearchSettingsContent::default();
+ search_settings.case_sensitive = Some(true);
+ settings.search = Some(search_settings);
+ });
+
cx.set_state("abc\nΛabc abc\ndefabc\nabc");
cx.update_editor(|e, window, cx| e.select_next(&SelectNext::default(), window, cx))
@@ -8345,14 +8353,41 @@ async fn test_select_next(cx: &mut TestAppContext) {
cx.update_editor(|e, window, cx| e.select_next(&SelectNext::default(), window, cx))
.unwrap();
cx.assert_editor_state("abc\nΒ«ΛabcΒ» Β«ΛabcΒ»\ndefabc\nabc");
+
+ // Test case sensitivity
+ cx.set_state("Β«ΛfooΒ»\nFOO\nFoo\nfoo");
+ cx.update_editor(|e, window, cx| {
+ e.select_next(&SelectNext::default(), window, cx).unwrap();
+ });
+ cx.assert_editor_state("Β«ΛfooΒ»\nFOO\nFoo\nΒ«ΛfooΒ»");
+
+ // Disable case sensitive search.
+ update_test_editor_settings(&mut cx, |settings| {
+ let mut search_settings = SearchSettingsContent::default();
+ search_settings.case_sensitive = Some(false);
+ settings.search = Some(search_settings);
+ });
+
+ cx.set_state("Β«ΛfooΒ»\nFOO\nFoo");
+ cx.update_editor(|e, window, cx| {
+ e.select_next(&SelectNext::default(), window, cx).unwrap();
+ e.select_next(&SelectNext::default(), window, cx).unwrap();
+ });
+ cx.assert_editor_state("Β«ΛfooΒ»\nΒ«ΛFOOΒ»\nΒ«ΛFooΒ»");
}
#[gpui::test]
async fn test_select_all_matches(cx: &mut TestAppContext) {
init_test(cx, |_| {});
-
let mut cx = EditorTestContext::new(cx).await;
+ // Enable case sensitive search.
+ update_test_editor_settings(&mut cx, |settings| {
+ let mut search_settings = SearchSettingsContent::default();
+ search_settings.case_sensitive = Some(true);
+ settings.search = Some(search_settings);
+ });
+
// Test caret-only selections
cx.set_state("abc\nΛabc abc\ndefabc\nabc");
cx.update_editor(|e, window, cx| e.select_all_matches(&SelectAllMatches, window, cx))
@@ -8397,6 +8432,26 @@ async fn test_select_all_matches(cx: &mut TestAppContext) {
e.set_clip_at_line_ends(false, cx);
});
cx.assert_editor_state("Β«abcΛΒ»");
+
+ // Test case sensitivity
+ cx.set_state("fΛoo\nFOO\nFoo");
+ cx.update_editor(|e, window, cx| {
+ e.select_all_matches(&SelectAllMatches, window, cx).unwrap();
+ });
+ cx.assert_editor_state("Β«fooΛΒ»\nFOO\nFoo");
+
+ // Disable case sensitive search.
+ update_test_editor_settings(&mut cx, |settings| {
+ let mut search_settings = SearchSettingsContent::default();
+ search_settings.case_sensitive = Some(false);
+ settings.search = Some(search_settings);
+ });
+
+ cx.set_state("fΛoo\nFOO\nFoo");
+ cx.update_editor(|e, window, cx| {
+ e.select_all_matches(&SelectAllMatches, window, cx).unwrap();
+ });
+ cx.assert_editor_state("Β«fooΛΒ»\nΒ«FOOΛΒ»\nΒ«FooΛΒ»");
}
#[gpui::test]
@@ -8768,8 +8823,15 @@ let foo = Β«2ΛΒ»;"#,
#[gpui::test]
async fn test_select_previous_with_single_selection(cx: &mut TestAppContext) {
init_test(cx, |_| {});
-
let mut cx = EditorTestContext::new(cx).await;
+
+ // Enable case sensitive search.
+ update_test_editor_settings(&mut cx, |settings| {
+ let mut search_settings = SearchSettingsContent::default();
+ search_settings.case_sensitive = Some(true);
+ settings.search = Some(search_settings);
+ });
+
cx.set_state("abc\nΒ«ΛabcΒ» abc\ndefabc\nabc");
cx.update_editor(|e, window, cx| e.select_previous(&SelectPrevious::default(), window, cx))
@@ -8794,6 +8856,32 @@ async fn test_select_previous_with_single_selection(cx: &mut TestAppContext) {
cx.update_editor(|e, window, cx| e.select_previous(&SelectPrevious::default(), window, cx))
.unwrap();
cx.assert_editor_state("Β«ΛabcΒ»\nΒ«ΛabcΒ» Β«ΛabcΒ»\ndefΒ«ΛabcΒ»\nΒ«ΛabcΒ»");
+
+ // Test case sensitivity
+ cx.set_state("foo\nFOO\nFoo\nΒ«ΛfooΒ»");
+ cx.update_editor(|e, window, cx| {
+ e.select_previous(&SelectPrevious::default(), window, cx)
+ .unwrap();
+ e.select_previous(&SelectPrevious::default(), window, cx)
+ .unwrap();
+ });
+ cx.assert_editor_state("Β«ΛfooΒ»\nFOO\nFoo\nΒ«ΛfooΒ»");
+
+ // Disable case sensitive search.
+ update_test_editor_settings(&mut cx, |settings| {
+ let mut search_settings = SearchSettingsContent::default();
+ search_settings.case_sensitive = Some(false);
+ settings.search = Some(search_settings);
+ });
+
+ cx.set_state("foo\nFOO\nΒ«ΛFooΒ»");
+ cx.update_editor(|e, window, cx| {
+ e.select_previous(&SelectPrevious::default(), window, cx)
+ .unwrap();
+ e.select_previous(&SelectPrevious::default(), window, cx)
+ .unwrap();
+ });
+ cx.assert_editor_state("Β«ΛfooΒ»\nΒ«ΛFOOΒ»\nΒ«ΛFooΒ»");
}
#[gpui::test]
@@ -13826,7 +13914,7 @@ async fn test_completion_mode(cx: &mut TestAppContext) {
cx.set_state(&run.initial_state);
cx.update_editor(|editor, window, cx| {
- editor.show_completions(&ShowCompletions { trigger: None }, window, cx);
+ editor.show_completions(&ShowCompletions, window, cx);
});
let counter = Arc::new(AtomicUsize::new(0));
@@ -13886,7 +13974,7 @@ async fn test_completion_with_mode_specified_by_action(cx: &mut TestAppContext)
cx.set_state(initial_state);
cx.update_editor(|editor, window, cx| {
- editor.show_completions(&ShowCompletions { trigger: None }, window, cx);
+ editor.show_completions(&ShowCompletions, window, cx);
});
let counter = Arc::new(AtomicUsize::new(0));
@@ -13922,7 +14010,7 @@ async fn test_completion_with_mode_specified_by_action(cx: &mut TestAppContext)
cx.set_state(initial_state);
cx.update_editor(|editor, window, cx| {
- editor.show_completions(&ShowCompletions { trigger: None }, window, cx);
+ editor.show_completions(&ShowCompletions, window, cx);
});
handle_completion_request_with_insert_and_replace(
&mut cx,
@@ -14009,7 +14097,7 @@ async fn test_completion_replacing_surrounding_text_with_multicursors(cx: &mut T
"};
cx.set_state(initial_state);
cx.update_editor(|editor, window, cx| {
- editor.show_completions(&ShowCompletions { trigger: None }, window, cx);
+ editor.show_completions(&ShowCompletions, window, cx);
});
handle_completion_request_with_insert_and_replace(
&mut cx,
@@ -14063,7 +14151,7 @@ async fn test_completion_replacing_surrounding_text_with_multicursors(cx: &mut T
"};
cx.set_state(initial_state);
cx.update_editor(|editor, window, cx| {
- editor.show_completions(&ShowCompletions { trigger: None }, window, cx);
+ editor.show_completions(&ShowCompletions, window, cx);
});
handle_completion_request_with_insert_and_replace(
&mut cx,
@@ -14112,7 +14200,7 @@ async fn test_completion_replacing_surrounding_text_with_multicursors(cx: &mut T
"};
cx.set_state(initial_state);
cx.update_editor(|editor, window, cx| {
- editor.show_completions(&ShowCompletions { trigger: None }, window, cx);
+ editor.show_completions(&ShowCompletions, window, cx);
});
handle_completion_request_with_insert_and_replace(
&mut cx,
@@ -14263,7 +14351,7 @@ async fn test_completion_in_multibuffer_with_replace_range(cx: &mut TestAppConte
});
editor.update_in(cx, |editor, window, cx| {
- editor.show_completions(&ShowCompletions { trigger: None }, window, cx);
+ editor.show_completions(&ShowCompletions, window, cx);
});
fake_server
@@ -14502,7 +14590,7 @@ async fn test_completion(cx: &mut TestAppContext) {
cx.assert_editor_state("editor.cloΛ");
assert!(cx.editor(|e, _, _| e.context_menu.borrow_mut().is_none()));
cx.update_editor(|editor, window, cx| {
- editor.show_completions(&ShowCompletions { trigger: None }, window, cx);
+ editor.show_completions(&ShowCompletions, window, cx);
});
handle_completion_request(
"editor.<clo|>",
@@ -14901,7 +14989,7 @@ async fn test_word_completions_usually_skip_digits(cx: &mut TestAppContext) {
4.5f32
"});
cx.update_editor(|editor, window, cx| {
- editor.show_completions(&ShowCompletions::default(), window, cx);
+ editor.show_completions(&ShowCompletions, window, cx);
});
cx.executor().run_until_parked();
cx.condition(|editor, _| editor.context_menu_visible())
@@ -14927,7 +15015,7 @@ async fn test_word_completions_usually_skip_digits(cx: &mut TestAppContext) {
33.35f32
"});
cx.update_editor(|editor, window, cx| {
- editor.show_completions(&ShowCompletions::default(), window, cx);
+ editor.show_completions(&ShowCompletions, window, cx);
});
cx.executor().run_until_parked();
cx.condition(|editor, _| editor.context_menu_visible())
@@ -15055,6 +15143,35 @@ async fn test_word_completions_disabled(cx: &mut TestAppContext) {
});
}
+#[gpui::test]
+async fn test_word_completions_disabled_with_no_provider(cx: &mut TestAppContext) {
+ init_test(cx, |language_settings| {
+ language_settings.defaults.completions = Some(CompletionSettingsContent {
+ words: Some(WordsCompletionMode::Disabled),
+ words_min_length: Some(0),
+ lsp_insert_mode: Some(LspInsertMode::Insert),
+ ..Default::default()
+ });
+ });
+
+ let mut cx = EditorLspTestContext::new_rust(lsp::ServerCapabilities::default(), cx).await;
+ cx.update_editor(|editor, _, _| {
+ editor.set_completion_provider(None);
+ });
+ cx.set_state(indoc! {"Λ
+ wow
+ wowen
+ wowser
+ "});
+ cx.simulate_keystroke("w");
+ cx.executor().run_until_parked();
+ cx.update_editor(|editor, _, _| {
+ if editor.context_menu.borrow_mut().is_some() {
+ panic!("expected completion menu to be hidden, as disabled in settings");
+ }
+ });
+}
+
fn gen_text_edit(params: &CompletionParams, text: &str) -> Option<lsp::CompletionTextEdit> {
let position = || lsp::Position {
line: params.text_document_position.position.line,
@@ -15351,13 +15468,7 @@ async fn test_as_is_completions(cx: &mut TestAppContext) {
cx.set_state("fn a() {}\n nΛ");
cx.executor().run_until_parked();
cx.update_editor(|editor, window, cx| {
- editor.show_completions(
- &ShowCompletions {
- trigger: Some("\n".into()),
- },
- window,
- cx,
- );
+ editor.trigger_completion_on_input("n", true, window, cx)
});
cx.executor().run_until_parked();
@@ -15455,7 +15566,7 @@ int fn_branch(bool do_branch1, bool do_branch2);
})))
});
cx.update_editor(|editor, window, cx| {
- editor.show_completions(&ShowCompletions { trigger: None }, window, cx);
+ editor.show_completions(&ShowCompletions, window, cx);
});
cx.executor().run_until_parked();
cx.update_editor(|editor, window, cx| {
@@ -15504,7 +15615,7 @@ int fn_branch(bool do_branch1, bool do_branch2);
})))
});
cx.update_editor(|editor, window, cx| {
- editor.show_completions(&ShowCompletions { trigger: None }, window, cx);
+ editor.show_completions(&ShowCompletions, window, cx);
});
cx.executor().run_until_parked();
cx.update_editor(|editor, window, cx| {
@@ -17994,7 +18105,7 @@ async fn test_context_menus_hide_hover_popover(cx: &mut gpui::TestAppContext) {
}
});
cx.update_editor(|editor, window, cx| {
- editor.show_completions(&ShowCompletions { trigger: None }, window, cx);
+ editor.show_completions(&ShowCompletions, window, cx);
});
completion_requests.next().await;
cx.condition(|editor, _| editor.context_menu_visible())
@@ -22267,7 +22378,7 @@ async fn test_folding_buffers(cx: &mut TestAppContext) {
assert_eq!(
multi_buffer_editor.update(cx, |editor, cx| editor.display_text(cx)),
- "\n\nB\n\n\n\n\n\n\nllll\nmmmm\nnnnn\n\n\nqqqq\nrrrr\n\n\nuuuu\n\n",
+ "\n\naaaa\nBbbbb\ncccc\n\n\nffff\ngggg\n\n\njjjj\n\n\nllll\nmmmm\nnnnn\n\n\nqqqq\nrrrr\n\n\nuuuu\n\n",
"After unfolding the first buffer, its and 2nd buffer's text should be displayed"
);
@@ -22276,7 +22387,7 @@ async fn test_folding_buffers(cx: &mut TestAppContext) {
});
assert_eq!(
multi_buffer_editor.update(cx, |editor, cx| editor.display_text(cx)),
- "\n\nB\n\n\n\n\n\n\nllll\nmmmm\nnnnn\n\n\nqqqq\nrrrr\n\n\nuuuu\n\n\nvvvv\nwwww\nxxxx\n\n\n1111\n2222\n\n\n5555",
+ "\n\naaaa\nBbbbb\ncccc\n\n\nffff\ngggg\n\n\njjjj\n\n\nllll\nmmmm\nnnnn\n\n\nqqqq\nrrrr\n\n\nuuuu\n\n\nvvvv\nwwww\nxxxx\n\n\n1111\n2222\n\n\n5555",
"After unfolding the all buffers, all original text should be displayed"
);
}
@@ -24390,7 +24501,7 @@ async fn test_html_linked_edits_on_completion(cx: &mut TestAppContext) {
])))
});
editor.update_in(cx, |editor, window, cx| {
- editor.show_completions(&ShowCompletions { trigger: None }, window, cx);
+ editor.show_completions(&ShowCompletions, window, cx);
});
cx.run_until_parked();
completion_handle.next().await.unwrap();
@@ -25693,6 +25804,17 @@ pub(crate) fn update_test_project_settings(
});
}
+pub(crate) fn update_test_editor_settings(
+ cx: &mut TestAppContext,
+ f: impl Fn(&mut EditorSettingsContent),
+) {
+ cx.update(|cx| {
+ SettingsStore::update_global(cx, |store, cx| {
+ store.update_user_settings(cx, |settings| f(&mut settings.editor));
+ })
+ })
+}
+
pub(crate) fn init_test(cx: &mut TestAppContext, f: fn(&mut AllLanguageSettingsContent)) {
cx.update(|cx| {
assets::Assets.load_test_fonts(cx);
@@ -27003,6 +27125,215 @@ async fn test_end_of_editor_context(cx: &mut TestAppContext) {
});
}
+#[gpui::test]
+async fn test_sticky_scroll(cx: &mut TestAppContext) {
+ init_test(cx, |_| {});
+ let mut cx = EditorTestContext::new(cx).await;
+
+ let buffer = indoc! {"
+ Λfn foo() {
+ let abc = 123;
+ }
+ struct Bar;
+ impl Bar {
+ fn new() -> Self {
+ Self
+ }
+ }
+ fn baz() {
+ }
+ "};
+ cx.set_state(&buffer);
+
+ cx.update_editor(|e, _, cx| {
+ e.buffer()
+ .read(cx)
+ .as_singleton()
+ .unwrap()
+ .update(cx, |buffer, cx| {
+ buffer.set_language(Some(rust_lang()), cx);
+ })
+ });
+
+ let mut sticky_headers = |offset: ScrollOffset| {
+ cx.update_editor(|e, window, cx| {
+ e.scroll(gpui::Point { x: 0., y: offset }, None, window, cx);
+ EditorElement::sticky_headers(&e, &e.snapshot(window, cx), cx)
+ .into_iter()
+ .map(
+ |StickyHeader {
+ start_point,
+ offset,
+ ..
+ }| { (start_point, offset) },
+ )
+ .collect::<Vec<_>>()
+ })
+ };
+
+ let fn_foo = Point { row: 0, column: 0 };
+ let impl_bar = Point { row: 4, column: 0 };
+ let fn_new = Point { row: 5, column: 4 };
+
+ assert_eq!(sticky_headers(0.0), vec![]);
+ assert_eq!(sticky_headers(0.5), vec![(fn_foo, 0.0)]);
+ assert_eq!(sticky_headers(1.0), vec![(fn_foo, 0.0)]);
+ assert_eq!(sticky_headers(1.5), vec![(fn_foo, -0.5)]);
+ assert_eq!(sticky_headers(2.0), vec![]);
+ assert_eq!(sticky_headers(2.5), vec![]);
+ assert_eq!(sticky_headers(3.0), vec![]);
+ assert_eq!(sticky_headers(3.5), vec![]);
+ assert_eq!(sticky_headers(4.0), vec![]);
+ assert_eq!(sticky_headers(4.5), vec![(impl_bar, 0.0), (fn_new, 1.0)]);
+ assert_eq!(sticky_headers(5.0), vec![(impl_bar, 0.0), (fn_new, 1.0)]);
+ assert_eq!(sticky_headers(5.5), vec![(impl_bar, 0.0), (fn_new, 0.5)]);
+ assert_eq!(sticky_headers(6.0), vec![(impl_bar, 0.0)]);
+ assert_eq!(sticky_headers(6.5), vec![(impl_bar, 0.0)]);
+ assert_eq!(sticky_headers(7.0), vec![(impl_bar, 0.0)]);
+ assert_eq!(sticky_headers(7.5), vec![(impl_bar, -0.5)]);
+ assert_eq!(sticky_headers(8.0), vec![]);
+ assert_eq!(sticky_headers(8.5), vec![]);
+ assert_eq!(sticky_headers(9.0), vec![]);
+ assert_eq!(sticky_headers(9.5), vec![]);
+ assert_eq!(sticky_headers(10.0), vec![]);
+}
+
+#[gpui::test]
+async fn test_scroll_by_clicking_sticky_header(cx: &mut TestAppContext) {
+ init_test(cx, |_| {});
+ cx.update(|cx| {
+ SettingsStore::update_global(cx, |store, cx| {
+ store.update_user_settings(cx, |settings| {
+ settings.editor.sticky_scroll = Some(settings::StickyScrollContent {
+ enabled: Some(true),
+ })
+ });
+ });
+ });
+ let mut cx = EditorTestContext::new(cx).await;
+
+ let line_height = cx.editor(|editor, window, _cx| {
+ editor
+ .style()
+ .unwrap()
+ .text
+ .line_height_in_pixels(window.rem_size())
+ });
+
+ let buffer = indoc! {"
+ Λfn foo() {
+ let abc = 123;
+ }
+ struct Bar;
+ impl Bar {
+ fn new() -> Self {
+ Self
+ }
+ }
+ fn baz() {
+ }
+ "};
+ cx.set_state(&buffer);
+
+ cx.update_editor(|e, _, cx| {
+ e.buffer()
+ .read(cx)
+ .as_singleton()
+ .unwrap()
+ .update(cx, |buffer, cx| {
+ buffer.set_language(Some(rust_lang()), cx);
+ })
+ });
+
+ let fn_foo = || empty_range(0, 0);
+ let impl_bar = || empty_range(4, 0);
+ let fn_new = || empty_range(5, 4);
+
+ let mut scroll_and_click = |scroll_offset: ScrollOffset, click_offset: ScrollOffset| {
+ cx.update_editor(|e, window, cx| {
+ e.scroll(
+ gpui::Point {
+ x: 0.,
+ y: scroll_offset,
+ },
+ None,
+ window,
+ cx,
+ );
+ });
+ cx.simulate_click(
+ gpui::Point {
+ x: px(0.),
+ y: click_offset as f32 * line_height,
+ },
+ Modifiers::none(),
+ );
+ cx.update_editor(|e, _, cx| (e.scroll_position(cx), display_ranges(e, cx)))
+ };
+
+ assert_eq!(
+ scroll_and_click(
+ 4.5, // impl Bar is halfway off the screen
+ 0.0 // click top of screen
+ ),
+ // scrolled to impl Bar
+ (gpui::Point { x: 0., y: 4. }, vec![impl_bar()])
+ );
+
+ assert_eq!(
+ scroll_and_click(
+ 4.5, // impl Bar is halfway off the screen
+ 0.25 // click middle of impl Bar
+ ),
+ // scrolled to impl Bar
+ (gpui::Point { x: 0., y: 4. }, vec![impl_bar()])
+ );
+
+ assert_eq!(
+ scroll_and_click(
+ 4.5, // impl Bar is halfway off the screen
+ 1.5 // click below impl Bar (e.g. fn new())
+ ),
+ // scrolled to fn new() - this is below the impl Bar header which has persisted
+ (gpui::Point { x: 0., y: 4. }, vec![fn_new()])
+ );
+
+ assert_eq!(
+ scroll_and_click(
+ 5.5, // fn new is halfway underneath impl Bar
+ 0.75 // click on the overlap of impl Bar and fn new()
+ ),
+ (gpui::Point { x: 0., y: 4. }, vec![impl_bar()])
+ );
+
+ assert_eq!(
+ scroll_and_click(
+ 5.5, // fn new is halfway underneath impl Bar
+ 1.25 // click on the visible part of fn new()
+ ),
+ (gpui::Point { x: 0., y: 4. }, vec![fn_new()])
+ );
+
+ assert_eq!(
+ scroll_and_click(
+ 1.5, // fn foo is halfway off the screen
+ 0.0 // click top of screen
+ ),
+ (gpui::Point { x: 0., y: 0. }, vec![fn_foo()])
+ );
+
+ assert_eq!(
+ scroll_and_click(
+ 1.5, // fn foo is halfway off the screen
+ 0.75 // click visible part of let abc...
+ )
+ .0,
+ // no change in scroll
+ // we don't assert on the visible_range because if we clicked the gutter, our line is fully selected
+ (gpui::Point { x: 0., y: 1.5 })
+ );
+}
+
#[gpui::test]
async fn test_next_prev_reference(cx: &mut TestAppContext) {
const CYCLE_POSITIONS: &[&'static str] = &[
@@ -27122,3 +27453,213 @@ async fn test_next_prev_reference(cx: &mut TestAppContext) {
_move(Direction::Prev, 2, &mut cx).await;
cx.assert_editor_state(CYCLE_POSITIONS[1]);
}
+
+#[gpui::test]
+async fn test_multibuffer_selections_with_folding(cx: &mut TestAppContext) {
+ init_test(cx, |_| {});
+
+ let (editor, cx) = cx.add_window_view(|window, cx| {
+ let multi_buffer = MultiBuffer::build_multi(
+ [
+ ("1\n2\n3\n", vec![Point::row_range(0..3)]),
+ ("1\n2\n3\n", vec![Point::row_range(0..3)]),
+ ],
+ cx,
+ );
+ Editor::new(EditorMode::full(), multi_buffer, None, window, cx)
+ });
+
+ let mut cx = EditorTestContext::for_editor_in(editor.clone(), cx).await;
+ let buffer_ids = cx.multibuffer(|mb, _| mb.excerpt_buffer_ids());
+
+ cx.assert_excerpts_with_selections(indoc! {"
+ [EXCERPT]
+ Λ1
+ 2
+ 3
+ [EXCERPT]
+ 1
+ 2
+ 3
+ "});
+
+ // Scenario 1: Unfolded buffers, position cursor on "2", select all matches, then insert
+ cx.update_editor(|editor, window, cx| {
+ editor.change_selections(None.into(), window, cx, |s| {
+ s.select_ranges([2..3]);
+ });
+ });
+ cx.assert_excerpts_with_selections(indoc! {"
+ [EXCERPT]
+ 1
+ 2Λ
+ 3
+ [EXCERPT]
+ 1
+ 2
+ 3
+ "});
+
+ cx.update_editor(|editor, window, cx| {
+ editor
+ .select_all_matches(&SelectAllMatches, window, cx)
+ .unwrap();
+ });
+ cx.assert_excerpts_with_selections(indoc! {"
+ [EXCERPT]
+ 1
+ 2Λ
+ 3
+ [EXCERPT]
+ 1
+ 2Λ
+ 3
+ "});
+
+ cx.update_editor(|editor, window, cx| {
+ editor.handle_input("X", window, cx);
+ });
+ cx.assert_excerpts_with_selections(indoc! {"
+ [EXCERPT]
+ 1
+ XΛ
+ 3
+ [EXCERPT]
+ 1
+ XΛ
+ 3
+ "});
+
+ // Scenario 2: Select "2", then fold second buffer before insertion
+ cx.update_multibuffer(|mb, cx| {
+ for buffer_id in buffer_ids.iter() {
+ let buffer = mb.buffer(*buffer_id).unwrap();
+ buffer.update(cx, |buffer, cx| {
+ buffer.edit([(0..buffer.len(), "1\n2\n3\n")], None, cx);
+ });
+ }
+ });
+
+ // Select "2" and select all matches
+ cx.update_editor(|editor, window, cx| {
+ editor.change_selections(None.into(), window, cx, |s| {
+ s.select_ranges([2..3]);
+ });
+ editor
+ .select_all_matches(&SelectAllMatches, window, cx)
+ .unwrap();
+ });
+
+ // Fold second buffer - should remove selections from folded buffer
+ cx.update_editor(|editor, _, cx| {
+ editor.fold_buffer(buffer_ids[1], cx);
+ });
+ cx.assert_excerpts_with_selections(indoc! {"
+ [EXCERPT]
+ 1
+ 2Λ
+ 3
+ [EXCERPT]
+ [FOLDED]
+ "});
+
+ // Insert text - should only affect first buffer
+ cx.update_editor(|editor, window, cx| {
+ editor.handle_input("Y", window, cx);
+ });
+ cx.update_editor(|editor, _, cx| {
+ editor.unfold_buffer(buffer_ids[1], cx);
+ });
+ cx.assert_excerpts_with_selections(indoc! {"
+ [EXCERPT]
+ 1
+ YΛ
+ 3
+ [EXCERPT]
+ 1
+ 2
+ 3
+ "});
+
+ // Scenario 3: Select "2", then fold first buffer before insertion
+ cx.update_multibuffer(|mb, cx| {
+ for buffer_id in buffer_ids.iter() {
+ let buffer = mb.buffer(*buffer_id).unwrap();
+ buffer.update(cx, |buffer, cx| {
+ buffer.edit([(0..buffer.len(), "1\n2\n3\n")], None, cx);
+ });
+ }
+ });
+
+ // Select "2" and select all matches
+ cx.update_editor(|editor, window, cx| {
+ editor.change_selections(None.into(), window, cx, |s| {
+ s.select_ranges([2..3]);
+ });
+ editor
+ .select_all_matches(&SelectAllMatches, window, cx)
+ .unwrap();
+ });
+
+ // Fold first buffer - should remove selections from folded buffer
+ cx.update_editor(|editor, _, cx| {
+ editor.fold_buffer(buffer_ids[0], cx);
+ });
+ cx.assert_excerpts_with_selections(indoc! {"
+ [EXCERPT]
+ [FOLDED]
+ [EXCERPT]
+ 1
+ 2Λ
+ 3
+ "});
+
+ // Insert text - should only affect second buffer
+ cx.update_editor(|editor, window, cx| {
+ editor.handle_input("Z", window, cx);
+ });
+ cx.update_editor(|editor, _, cx| {
+ editor.unfold_buffer(buffer_ids[0], cx);
+ });
+ cx.assert_excerpts_with_selections(indoc! {"
+ [EXCERPT]
+ 1
+ 2
+ 3
+ [EXCERPT]
+ 1
+ ZΛ
+ 3
+ "});
+
+ // Edge case scenario: fold all buffers, then try to insert
+ cx.update_editor(|editor, _, cx| {
+ editor.fold_buffer(buffer_ids[0], cx);
+ editor.fold_buffer(buffer_ids[1], cx);
+ });
+ cx.assert_excerpts_with_selections(indoc! {"
+ [EXCERPT]
+ Λ[FOLDED]
+ [EXCERPT]
+ [FOLDED]
+ "});
+
+ // Insert should work via default selection
+ cx.update_editor(|editor, window, cx| {
+ editor.handle_input("W", window, cx);
+ });
+ cx.update_editor(|editor, _, cx| {
+ editor.unfold_buffer(buffer_ids[0], cx);
+ editor.unfold_buffer(buffer_ids[1], cx);
+ });
+ cx.assert_excerpts_with_selections(indoc! {"
+ [EXCERPT]
+ WΛ1
+ 2
+ 3
+ [EXCERPT]
+ 1
+ Z
+ 3
+ "});
+}
@@ -8,8 +8,8 @@ use crate::{
HandleInput, HoveredCursor, InlayHintRefreshReason, JumpData, LineDown, LineHighlight, LineUp,
MAX_LINE_LEN, MINIMAP_FONT_SIZE, MULTI_BUFFER_EXCERPT_HEADER_HEIGHT, OpenExcerpts,
OpenExcerptsSplit, PageDown, PageUp, PhantomBreakpointIndicator, Point, RowExt, RowRangeExt,
- SelectPhase, SelectedTextHighlight, Selection, SelectionDragState, SizingBehavior, SoftWrap,
- StickyHeaderExcerpt, ToPoint, ToggleFold, ToggleFoldAll,
+ SelectPhase, SelectedTextHighlight, Selection, SelectionDragState, SelectionEffects,
+ SizingBehavior, SoftWrap, StickyHeaderExcerpt, ToPoint, ToggleFold, ToggleFoldAll,
code_context_menus::{CodeActionsMenu, MENU_ASIDE_MAX_WIDTH, MENU_ASIDE_MIN_WIDTH, MENU_GAP},
display_map::{
Block, BlockContext, BlockStyle, ChunkRendererId, DisplaySnapshot, EditorMargins,
@@ -29,7 +29,7 @@ use crate::{
items::BufferSearchHighlights,
mouse_context_menu::{self, MenuPosition},
scroll::{
- ActiveScrollbarState, ScrollOffset, ScrollPixelOffset, ScrollbarThumbState,
+ ActiveScrollbarState, Autoscroll, ScrollOffset, ScrollPixelOffset, ScrollbarThumbState,
scroll_amount::ScrollAmount,
},
};
@@ -3255,11 +3255,9 @@ impl EditorElement {
(newest_selection_head, relative)
});
- let relative_to = if relative.enabled() {
- Some(newest_selection_head.row())
- } else {
- None
- };
+ let relative_line_numbers_enabled = relative.enabled();
+ let relative_to = relative_line_numbers_enabled.then(|| newest_selection_head.row());
+
let relative_rows =
self.calculate_relative_line_numbers(snapshot, &rows, relative_to, relative.wrapped());
let mut line_number = String::new();
@@ -3271,17 +3269,18 @@ impl EditorElement {
} else {
row_info.buffer_row? + 1
};
- let number = relative_rows
- .get(&display_row)
- .unwrap_or(&non_relative_number);
- write!(&mut line_number, "{number}").unwrap();
- if row_info
- .diff_status
- .is_some_and(|status| status.is_deleted())
+ let relative_number = relative_rows.get(&display_row);
+ if !(relative_line_numbers_enabled && relative_number.is_some())
+ && row_info
+ .diff_status
+ .is_some_and(|status| status.is_deleted())
{
return None;
}
+ let number = relative_number.unwrap_or(&non_relative_number);
+ write!(&mut line_number, "{number}").unwrap();
+
let color = active_rows
.get(&display_row)
.map(|spec| {
@@ -4555,6 +4554,138 @@ impl EditorElement {
header
}
+ fn layout_sticky_headers(
+ &self,
+ snapshot: &EditorSnapshot,
+ editor_width: Pixels,
+ is_row_soft_wrapped: impl Copy + Fn(usize) -> bool,
+ line_height: Pixels,
+ scroll_pixel_position: gpui::Point<ScrollPixelOffset>,
+ content_origin: gpui::Point<Pixels>,
+ gutter_dimensions: &GutterDimensions,
+ gutter_hitbox: &Hitbox,
+ text_hitbox: &Hitbox,
+ window: &mut Window,
+ cx: &mut App,
+ ) -> Option<StickyHeaders> {
+ let show_line_numbers = snapshot
+ .show_line_numbers
+ .unwrap_or_else(|| EditorSettings::get_global(cx).gutter.line_numbers);
+
+ let rows = Self::sticky_headers(self.editor.read(cx), snapshot, cx);
+
+ let mut lines = Vec::<StickyHeaderLine>::new();
+
+ for StickyHeader {
+ item,
+ sticky_row,
+ start_point,
+ offset,
+ } in rows.into_iter().rev()
+ {
+ let line = layout_line(
+ sticky_row,
+ snapshot,
+ &self.style,
+ editor_width,
+ is_row_soft_wrapped,
+ window,
+ cx,
+ );
+
+ let line_number = show_line_numbers.then(|| {
+ let number = (start_point.row + 1).to_string();
+ let color = cx.theme().colors().editor_line_number;
+ self.shape_line_number(SharedString::from(number), color, window)
+ });
+
+ lines.push(StickyHeaderLine::new(
+ sticky_row,
+ line_height * offset as f32,
+ line,
+ line_number,
+ item.range.start,
+ line_height,
+ scroll_pixel_position,
+ content_origin,
+ gutter_hitbox,
+ text_hitbox,
+ window,
+ cx,
+ ));
+ }
+
+ lines.reverse();
+ if lines.is_empty() {
+ return None;
+ }
+
+ Some(StickyHeaders {
+ lines,
+ gutter_background: cx.theme().colors().editor_gutter_background,
+ content_background: self.style.background,
+ gutter_right_padding: gutter_dimensions.right_padding,
+ })
+ }
+
+ pub(crate) fn sticky_headers(
+ editor: &Editor,
+ snapshot: &EditorSnapshot,
+ cx: &App,
+ ) -> Vec<StickyHeader> {
+ let scroll_top = snapshot.scroll_position().y;
+
+ let mut end_rows = Vec::<DisplayRow>::new();
+ let mut rows = Vec::<StickyHeader>::new();
+
+ let items = editor.sticky_headers(cx).unwrap_or_default();
+
+ for item in items {
+ let start_point = item.range.start.to_point(snapshot.buffer_snapshot());
+ let end_point = item.range.end.to_point(snapshot.buffer_snapshot());
+
+ let sticky_row = snapshot
+ .display_snapshot
+ .point_to_display_point(start_point, Bias::Left)
+ .row();
+ let end_row = snapshot
+ .display_snapshot
+ .point_to_display_point(end_point, Bias::Left)
+ .row();
+ let max_sticky_row = end_row.previous_row();
+ if max_sticky_row <= sticky_row {
+ continue;
+ }
+
+ while end_rows
+ .last()
+ .is_some_and(|&last_end| last_end < sticky_row)
+ {
+ end_rows.pop();
+ }
+ let depth = end_rows.len();
+ let adjusted_scroll_top = scroll_top + depth as f64;
+
+ if sticky_row.as_f64() >= adjusted_scroll_top || end_row.as_f64() <= adjusted_scroll_top
+ {
+ continue;
+ }
+
+ let max_scroll_offset = max_sticky_row.as_f64() - scroll_top;
+ let offset = (depth as f64).min(max_scroll_offset);
+
+ end_rows.push(end_row);
+ rows.push(StickyHeader {
+ item,
+ sticky_row,
+ start_point,
+ offset,
+ });
+ }
+
+ rows
+ }
+
fn layout_cursor_popovers(
&self,
line_height: Pixels,
@@ -6407,6 +6538,89 @@ impl EditorElement {
}
}
+ fn paint_sticky_headers(
+ &mut self,
+ layout: &mut EditorLayout,
+ window: &mut Window,
+ cx: &mut App,
+ ) {
+ let Some(mut sticky_headers) = layout.sticky_headers.take() else {
+ return;
+ };
+
+ if sticky_headers.lines.is_empty() {
+ layout.sticky_headers = Some(sticky_headers);
+ return;
+ }
+
+ let whitespace_setting = self
+ .editor
+ .read(cx)
+ .buffer
+ .read(cx)
+ .language_settings(cx)
+ .show_whitespaces;
+ sticky_headers.paint(layout, whitespace_setting, window, cx);
+
+ let sticky_header_hitboxes: Vec<Hitbox> = sticky_headers
+ .lines
+ .iter()
+ .map(|line| line.hitbox.clone())
+ .collect();
+ let hovered_hitbox = sticky_header_hitboxes
+ .iter()
+ .find_map(|hitbox| hitbox.is_hovered(window).then_some(hitbox.id));
+
+ window.on_mouse_event(move |_: &MouseMoveEvent, phase, window, _cx| {
+ if !phase.bubble() {
+ return;
+ }
+
+ let current_hover = sticky_header_hitboxes
+ .iter()
+ .find_map(|hitbox| hitbox.is_hovered(window).then_some(hitbox.id));
+ if hovered_hitbox != current_hover {
+ window.refresh();
+ }
+ });
+
+ for (line_index, line) in sticky_headers.lines.iter().enumerate() {
+ let editor = self.editor.clone();
+ let hitbox = line.hitbox.clone();
+ let target_anchor = line.target_anchor;
+ window.on_mouse_event(move |event: &MouseDownEvent, phase, window, cx| {
+ if !phase.bubble() {
+ return;
+ }
+
+ if event.button == MouseButton::Left && hitbox.is_hovered(window) {
+ editor.update(cx, |editor, cx| {
+ editor.change_selections(
+ SelectionEffects::scroll(Autoscroll::top_relative(line_index)),
+ window,
+ cx,
+ |selections| selections.select_ranges([target_anchor..target_anchor]),
+ );
+ cx.stop_propagation();
+ });
+ }
+ });
+ }
+
+ let text_bounds = layout.position_map.text_hitbox.bounds;
+ let border_top = text_bounds.top()
+ + sticky_headers.lines.last().unwrap().offset
+ + layout.position_map.line_height;
+ let separator_height = px(1.);
+ let border_bounds = Bounds::from_corners(
+ point(layout.gutter_hitbox.bounds.left(), border_top),
+ point(text_bounds.right(), border_top + separator_height),
+ );
+ window.paint_quad(fill(border_bounds, cx.theme().colors().border_variant));
+
+ layout.sticky_headers = Some(sticky_headers);
+ }
+
fn paint_lines_background(
&mut self,
layout: &mut EditorLayout,
@@ -8107,6 +8321,27 @@ impl LineWithInvisibles {
cx: &mut App,
) {
let line_y = f32::from(line_height) * Pixels::from(row.as_f64() - scroll_position.y);
+ self.prepaint_with_custom_offset(
+ line_height,
+ scroll_pixel_position,
+ content_origin,
+ line_y,
+ line_elements,
+ window,
+ cx,
+ );
+ }
+
+ fn prepaint_with_custom_offset(
+ &mut self,
+ line_height: Pixels,
+ scroll_pixel_position: gpui::Point<ScrollPixelOffset>,
+ content_origin: gpui::Point<Pixels>,
+ line_y: Pixels,
+ line_elements: &mut SmallVec<[AnyElement; 1]>,
+ window: &mut Window,
+ cx: &mut App,
+ ) {
let mut fragment_origin =
content_origin + gpui::point(Pixels::from(-scroll_pixel_position.x), line_y);
for fragment in &mut self.fragments {
@@ -8141,9 +8376,31 @@ impl LineWithInvisibles {
window: &mut Window,
cx: &mut App,
) {
- let line_height = layout.position_map.line_height;
- let line_y = line_height * (row.as_f64() - layout.position_map.scroll_position.y) as f32;
+ self.draw_with_custom_offset(
+ layout,
+ row,
+ content_origin,
+ layout.position_map.line_height
+ * (row.as_f64() - layout.position_map.scroll_position.y) as f32,
+ whitespace_setting,
+ selection_ranges,
+ window,
+ cx,
+ );
+ }
+ fn draw_with_custom_offset(
+ &self,
+ layout: &EditorLayout,
+ row: DisplayRow,
+ content_origin: gpui::Point<Pixels>,
+ line_y: Pixels,
+ whitespace_setting: ShowWhitespaceSetting,
+ selection_ranges: &[Range<DisplayPoint>],
+ window: &mut Window,
+ cx: &mut App,
+ ) {
+ let line_height = layout.position_map.line_height;
let mut fragment_origin = content_origin
+ gpui::point(
Pixels::from(-layout.position_map.scroll_pixel_position.x),
@@ -8582,6 +8839,7 @@ impl Element for EditorElement {
};
let is_minimap = self.editor.read(cx).mode.is_minimap();
+ let is_singleton = self.editor.read(cx).buffer_kind(cx) == ItemBufferKind::Singleton;
if !is_minimap {
let focus_handle = self.editor.focus_handle(cx);
@@ -9228,6 +9486,26 @@ impl Element for EditorElement {
scroll_position.x * f64::from(em_advance),
scroll_position.y * f64::from(line_height),
);
+ let sticky_headers = if !is_minimap
+ && is_singleton
+ && EditorSettings::get_global(cx).sticky_scroll.enabled
+ {
+ self.layout_sticky_headers(
+ &snapshot,
+ editor_width,
+ is_row_soft_wrapped,
+ line_height,
+ scroll_pixel_position,
+ content_origin,
+ &gutter_dimensions,
+ &gutter_hitbox,
+ &text_hitbox,
+ window,
+ cx,
+ )
+ } else {
+ None
+ };
let indent_guides = self.layout_indent_guides(
content_origin,
text_hitbox.origin,
@@ -9697,6 +9975,7 @@ impl Element for EditorElement {
tab_invisible,
space_invisible,
sticky_buffer_header,
+ sticky_headers,
expand_toggles,
}
})
@@ -9767,6 +10046,7 @@ impl Element for EditorElement {
}
});
+ self.paint_sticky_headers(layout, window, cx);
self.paint_minimap(layout, window, cx);
self.paint_scrollbars(layout, window, cx);
self.paint_edit_prediction_popover(layout, window, cx);
@@ -9875,15 +10155,180 @@ pub struct EditorLayout {
tab_invisible: ShapedLine,
space_invisible: ShapedLine,
sticky_buffer_header: Option<AnyElement>,
+ sticky_headers: Option<StickyHeaders>,
document_colors: Option<(DocumentColorsRenderMode, Vec<(Range<DisplayPoint>, Hsla)>)>,
}
+struct StickyHeaders {
+ lines: Vec<StickyHeaderLine>,
+ gutter_background: Hsla,
+ content_background: Hsla,
+ gutter_right_padding: Pixels,
+}
+
+struct StickyHeaderLine {
+ row: DisplayRow,
+ offset: Pixels,
+ line: LineWithInvisibles,
+ line_number: Option<ShapedLine>,
+ elements: SmallVec<[AnyElement; 1]>,
+ available_text_width: Pixels,
+ target_anchor: Anchor,
+ hitbox: Hitbox,
+}
+
impl EditorLayout {
fn line_end_overshoot(&self) -> Pixels {
0.15 * self.position_map.line_height
}
}
+impl StickyHeaders {
+ fn paint(
+ &mut self,
+ layout: &mut EditorLayout,
+ whitespace_setting: ShowWhitespaceSetting,
+ window: &mut Window,
+ cx: &mut App,
+ ) {
+ let line_height = layout.position_map.line_height;
+
+ for line in self.lines.iter_mut().rev() {
+ window.paint_layer(
+ Bounds::new(
+ layout.gutter_hitbox.origin + point(Pixels::ZERO, line.offset),
+ size(line.hitbox.size.width, line_height),
+ ),
+ |window| {
+ let gutter_bounds = Bounds::new(
+ layout.gutter_hitbox.origin + point(Pixels::ZERO, line.offset),
+ size(layout.gutter_hitbox.size.width, line_height),
+ );
+ window.paint_quad(fill(gutter_bounds, self.gutter_background));
+
+ let text_bounds = Bounds::new(
+ layout.position_map.text_hitbox.origin + point(Pixels::ZERO, line.offset),
+ size(line.available_text_width, line_height),
+ );
+ window.paint_quad(fill(text_bounds, self.content_background));
+
+ if line.hitbox.is_hovered(window) {
+ let hover_overlay = cx.theme().colors().panel_overlay_hover;
+ window.paint_quad(fill(gutter_bounds, hover_overlay));
+ window.paint_quad(fill(text_bounds, hover_overlay));
+ }
+
+ line.paint(
+ layout,
+ self.gutter_right_padding,
+ line.available_text_width,
+ layout.content_origin,
+ line_height,
+ whitespace_setting,
+ window,
+ cx,
+ );
+ },
+ );
+
+ window.set_cursor_style(CursorStyle::PointingHand, &line.hitbox);
+ }
+ }
+}
+
+impl StickyHeaderLine {
+ fn new(
+ row: DisplayRow,
+ offset: Pixels,
+ mut line: LineWithInvisibles,
+ line_number: Option<ShapedLine>,
+ target_anchor: Anchor,
+ line_height: Pixels,
+ scroll_pixel_position: gpui::Point<ScrollPixelOffset>,
+ content_origin: gpui::Point<Pixels>,
+ gutter_hitbox: &Hitbox,
+ text_hitbox: &Hitbox,
+ window: &mut Window,
+ cx: &mut App,
+ ) -> Self {
+ let mut elements = SmallVec::<[AnyElement; 1]>::new();
+ line.prepaint_with_custom_offset(
+ line_height,
+ scroll_pixel_position,
+ content_origin,
+ offset,
+ &mut elements,
+ window,
+ cx,
+ );
+
+ let hitbox_bounds = Bounds::new(
+ gutter_hitbox.origin + point(Pixels::ZERO, offset),
+ size(text_hitbox.right() - gutter_hitbox.left(), line_height),
+ );
+ let available_text_width =
+ (hitbox_bounds.size.width - gutter_hitbox.size.width).max(Pixels::ZERO);
+
+ Self {
+ row,
+ offset,
+ line,
+ line_number,
+ elements,
+ available_text_width,
+ target_anchor,
+ hitbox: window.insert_hitbox(hitbox_bounds, HitboxBehavior::BlockMouseExceptScroll),
+ }
+ }
+
+ fn paint(
+ &mut self,
+ layout: &EditorLayout,
+ gutter_right_padding: Pixels,
+ available_text_width: Pixels,
+ content_origin: gpui::Point<Pixels>,
+ line_height: Pixels,
+ whitespace_setting: ShowWhitespaceSetting,
+ window: &mut Window,
+ cx: &mut App,
+ ) {
+ window.with_content_mask(
+ Some(ContentMask {
+ bounds: Bounds::new(
+ layout.position_map.text_hitbox.bounds.origin
+ + point(Pixels::ZERO, self.offset),
+ size(available_text_width, line_height),
+ ),
+ }),
+ |window| {
+ self.line.draw_with_custom_offset(
+ layout,
+ self.row,
+ content_origin,
+ self.offset,
+ whitespace_setting,
+ &[],
+ window,
+ cx,
+ );
+ for element in &mut self.elements {
+ element.paint(window, cx);
+ }
+ },
+ );
+
+ if let Some(line_number) = &self.line_number {
+ let gutter_origin = layout.gutter_hitbox.origin + point(Pixels::ZERO, self.offset);
+ let gutter_width = layout.gutter_hitbox.size.width;
+ let origin = point(
+ gutter_origin.x + gutter_width - gutter_right_padding - line_number.width,
+ gutter_origin.y,
+ );
+ line_number.paint(origin, line_height, window, cx).log_err();
+ }
+ }
+}
+
#[derive(Debug)]
struct LineNumberSegment {
shaped_line: ShapedLine,
@@ -10730,6 +11175,13 @@ impl HighlightedRange {
}
}
+pub(crate) struct StickyHeader {
+ pub item: language::OutlineItem<Anchor>,
+ pub sticky_row: DisplayRow,
+ pub start_point: Point,
+ pub offset: ScrollOffset,
+}
+
enum CursorPopoverType {
CodeContextMenu,
EditPrediction,
@@ -11002,6 +11454,46 @@ mod tests {
assert_eq!(relative_rows[&DisplayRow(0)], 5);
assert_eq!(relative_rows[&DisplayRow(1)], 4);
assert_eq!(relative_rows[&DisplayRow(2)], 3);
+
+ const DELETED_LINE: u32 = 3;
+ let layouts = cx
+ .update_window(*window, |_, window, cx| {
+ element.layout_line_numbers(
+ None,
+ GutterDimensions {
+ left_padding: Pixels::ZERO,
+ right_padding: Pixels::ZERO,
+ width: px(30.0),
+ margin: Pixels::ZERO,
+ git_blame_entries_width: None,
+ },
+ line_height,
+ gpui::Point::default(),
+ DisplayRow(0)..DisplayRow(6),
+ &(0..6)
+ .map(|row| RowInfo {
+ buffer_row: Some(row),
+ diff_status: (row == DELETED_LINE).then(|| {
+ DiffHunkStatus::deleted(
+ buffer_diff::DiffHunkSecondaryStatus::NoSecondaryHunk,
+ )
+ }),
+ ..Default::default()
+ })
+ .collect::<Vec<_>>(),
+ &BTreeMap::default(),
+ Some(DisplayPoint::new(DisplayRow(0), 0)),
+ &snapshot,
+ window,
+ cx,
+ )
+ })
+ .unwrap();
+ assert_eq!(layouts.len(), 5,);
+ assert!(
+ layouts.get(&MultiBufferRow(DELETED_LINE)).is_none(),
+ "Deleted line should not have a line number"
+ );
}
#[gpui::test]
@@ -11077,6 +11569,62 @@ mod tests {
// current line has no relative number
assert_eq!(relative_rows[&DisplayRow(4)], 1);
assert_eq!(relative_rows[&DisplayRow(5)], 2);
+
+ let layouts = cx
+ .update_window(*window, |_, window, cx| {
+ element.layout_line_numbers(
+ None,
+ GutterDimensions {
+ left_padding: Pixels::ZERO,
+ right_padding: Pixels::ZERO,
+ width: px(30.0),
+ margin: Pixels::ZERO,
+ git_blame_entries_width: None,
+ },
+ line_height,
+ gpui::Point::default(),
+ DisplayRow(0)..DisplayRow(6),
+ &(0..6)
+ .map(|row| RowInfo {
+ buffer_row: Some(row),
+ diff_status: Some(DiffHunkStatus::deleted(
+ buffer_diff::DiffHunkSecondaryStatus::NoSecondaryHunk,
+ )),
+ ..Default::default()
+ })
+ .collect::<Vec<_>>(),
+ &BTreeMap::from_iter([(DisplayRow(0), LineHighlightSpec::default())]),
+ Some(DisplayPoint::new(DisplayRow(0), 0)),
+ &snapshot,
+ window,
+ cx,
+ )
+ })
+ .unwrap();
+ assert!(
+ layouts.is_empty(),
+ "Deleted lines should have no line number"
+ );
+
+ let relative_rows = window
+ .update(cx, |editor, window, cx| {
+ let snapshot = editor.snapshot(window, cx);
+ element.calculate_relative_line_numbers(
+ &snapshot,
+ &(DisplayRow(0)..DisplayRow(6)),
+ Some(DisplayRow(3)),
+ true,
+ )
+ })
+ .unwrap();
+
+ // Deleted lines should still have relative numbers
+ assert_eq!(relative_rows[&DisplayRow(0)], 3);
+ assert_eq!(relative_rows[&DisplayRow(1)], 2);
+ assert_eq!(relative_rows[&DisplayRow(2)], 1);
+ // current line, even if deleted, has no relative number
+ assert_eq!(relative_rows[&DisplayRow(4)], 1);
+ assert_eq!(relative_rows[&DisplayRow(5)], 2);
}
#[gpui::test]
@@ -1796,6 +1796,14 @@ impl SearchableItem for Editor {
fn search_bar_visibility_changed(&mut self, _: bool, _: &mut Window, _: &mut Context<Self>) {
self.expect_bounds_change = self.last_bounds;
}
+
+ fn set_search_is_case_sensitive(
+ &mut self,
+ case_sensitive: Option<bool>,
+ _cx: &mut Context<Self>,
+ ) {
+ self.select_next_is_case_sensitive = case_sensitive;
+ }
}
pub fn active_match_index(
@@ -487,6 +487,43 @@ impl<'snap, 'a> MutableSelectionsCollection<'snap, 'a> {
self.selections_changed |= changed;
}
+ pub fn remove_selections_from_buffer(&mut self, buffer_id: language::BufferId) {
+ let mut changed = false;
+
+ let filtered_selections: Arc<[Selection<Anchor>]> = {
+ self.disjoint
+ .iter()
+ .filter(|selection| {
+ if let Some(selection_buffer_id) =
+ self.snapshot.buffer_id_for_anchor(selection.start)
+ {
+ let should_remove = selection_buffer_id == buffer_id;
+ changed |= should_remove;
+ !should_remove
+ } else {
+ true
+ }
+ })
+ .cloned()
+ .collect()
+ };
+
+ if filtered_selections.is_empty() {
+ let default_anchor = self.snapshot.anchor_before(0);
+ self.collection.disjoint = Arc::from([Selection {
+ id: post_inc(&mut self.collection.next_selection_id),
+ start: default_anchor,
+ end: default_anchor,
+ reversed: false,
+ goal: SelectionGoal::None,
+ }]);
+ } else {
+ self.collection.disjoint = filtered_selections;
+ }
+
+ self.selections_changed |= changed;
+ }
+
pub fn clear_pending(&mut self) {
if self.collection.pending.is_some() {
self.collection.pending = None;
@@ -6,6 +6,7 @@ use buffer_diff::DiffHunkStatusKind;
use collections::BTreeMap;
use futures::Future;
+use git::repository::RepoPath;
use gpui::{
AnyWindowHandle, App, Context, Entity, Focusable as _, Keystroke, Pixels, Point,
VisualTestContext, Window, WindowHandle, prelude::*,
@@ -334,7 +335,10 @@ impl EditorTestContext {
let path = self.update_buffer(|buffer, _| buffer.file().unwrap().path().clone());
let mut found = None;
fs.with_git_state(&Self::root_path().join(".git"), false, |git_state| {
- found = git_state.index_contents.get(&path.into()).cloned();
+ found = git_state
+ .index_contents
+ .get(&RepoPath::from_rel_path(&path))
+ .cloned();
})
.unwrap();
assert_eq!(expected, found.as_deref());
@@ -463,8 +463,8 @@ pub fn find_model(
.ok_or_else(|| {
anyhow::anyhow!(
"No language model with ID {}/{} was available. Available models: {}",
- selected.model.0,
selected.provider.0,
+ selected.model.0,
model_registry
.available_models(cx)
.map(|model| format!("{}/{}", model.provider_id().0, model.id().0))
@@ -322,7 +322,7 @@ impl ExampleInstance {
thread.add_default_tools(Rc::new(EvalThreadEnvironment {
project: project.clone(),
}), cx);
- thread.set_profile(meta.profile_id.clone());
+ thread.set_profile(meta.profile_id.clone(), cx);
thread.set_model(
LanguageModelInterceptor::new(
LanguageModelRegistry::read_global(cx).default_model().expect("Missing model").model.clone(),
@@ -267,10 +267,9 @@ impl ExtensionManifest {
let mut extension_manifest_path = extension_dir.join("extension.json");
if fs.is_file(&extension_manifest_path).await {
- let manifest_content = fs
- .load(&extension_manifest_path)
- .await
- .with_context(|| format!("failed to load {extension_name} extension.json"))?;
+ let manifest_content = fs.load(&extension_manifest_path).await.with_context(|| {
+ format!("loading {extension_name} extension.json, {extension_manifest_path:?}")
+ })?;
let manifest_json = serde_json::from_str::<OldExtensionManifest>(&manifest_content)
.with_context(|| {
format!("invalid extension.json for extension {extension_name}")
@@ -279,10 +278,9 @@ impl ExtensionManifest {
Ok(manifest_from_old_manifest(manifest_json, extension_name))
} else {
extension_manifest_path.set_extension("toml");
- let manifest_content = fs
- .load(&extension_manifest_path)
- .await
- .with_context(|| format!("failed to load {extension_name} extension.toml"))?;
+ let manifest_content = fs.load(&extension_manifest_path).await.with_context(|| {
+ format!("loading {extension_name} extension.toml, {extension_manifest_path:?}")
+ })?;
toml::from_str(&manifest_content).map_err(|err| {
anyhow!("Invalid extension.toml for extension {extension_name}:\n{err}")
})
@@ -31,8 +31,7 @@ use util::test::TempTree;
#[cfg(test)]
#[ctor::ctor]
fn init_logger() {
- // show info logs while we debug the extension_store tests hanging.
- zlog::init_test_with("info");
+ zlog::init_test();
}
#[gpui::test]
@@ -532,6 +531,7 @@ async fn test_extension_store(cx: &mut TestAppContext) {
#[gpui::test]
async fn test_extension_store_with_test_extension(cx: &mut TestAppContext) {
+ log::info!("Initializing test");
init_test(cx);
cx.executor().allow_parking();
@@ -556,6 +556,8 @@ async fn test_extension_store_with_test_extension(cx: &mut TestAppContext) {
let extensions_dir = extensions_tree.path().canonicalize().unwrap();
let project_dir = project_dir.path().canonicalize().unwrap();
+ log::info!("Setting up test");
+
let project = Project::test(fs.clone(), [project_dir.as_path()], cx).await;
let proxy = Arc::new(ExtensionHostProxy::new());
@@ -674,6 +676,8 @@ async fn test_extension_store_with_test_extension(cx: &mut TestAppContext) {
)
});
+ log::info!("Flushing events");
+
// Ensure that debounces fire.
let mut events = cx.events(&extension_store);
let executor = cx.executor();
@@ -763,17 +763,17 @@ impl WasmExtension {
.fs
.open_sync(&path)
.await
- .context("failed to open wasm file")?;
+ .context(format!("opening wasm file, path: {path:?}"))?;
let mut wasm_bytes = Vec::new();
wasm_file
.read_to_end(&mut wasm_bytes)
- .context("failed to read wasm")?;
+ .context(format!("reading wasm file, path: {path:?}"))?;
wasm_host
.load_extension(wasm_bytes, manifest, cx)
.await
- .with_context(|| format!("failed to load wasm extension {}", manifest.id))
+ .with_context(|| format!("loading wasm extension: {}", manifest.id))
}
pub async fn call<T, Fn>(&self, f: Fn) -> Result<T>
@@ -3452,3 +3452,99 @@ async fn test_paths_with_starting_slash(cx: &mut TestAppContext) {
assert_eq!(active_editor.read(cx).title(cx), "file1.txt");
});
}
+
+#[gpui::test]
+async fn test_clear_navigation_history(cx: &mut TestAppContext) {
+ let app_state = init_test(cx);
+ app_state
+ .fs
+ .as_fake()
+ .insert_tree(
+ path!("/src"),
+ json!({
+ "test": {
+ "first.rs": "// First file",
+ "second.rs": "// Second file",
+ "third.rs": "// Third file",
+ }
+ }),
+ )
+ .await;
+
+ let project = Project::test(app_state.fs.clone(), [path!("/src").as_ref()], cx).await;
+ let (workspace, cx) = cx.add_window_view(|window, cx| Workspace::test_new(project, window, cx));
+
+ workspace.update_in(cx, |_workspace, window, cx| window.focused(cx));
+
+ // Open some files to generate navigation history
+ open_close_queried_buffer("fir", 1, "first.rs", &workspace, cx).await;
+ open_close_queried_buffer("sec", 1, "second.rs", &workspace, cx).await;
+ let history_before_clear =
+ open_close_queried_buffer("thi", 1, "third.rs", &workspace, cx).await;
+
+ assert_eq!(
+ history_before_clear.len(),
+ 2,
+ "Should have history items before clearing"
+ );
+
+ // Verify that file finder shows history items
+ let picker = open_file_picker(&workspace, cx);
+ cx.simulate_input("fir");
+ picker.update(cx, |finder, _| {
+ let matches = collect_search_matches(finder);
+ assert!(
+ !matches.history.is_empty(),
+ "File finder should show history items before clearing"
+ );
+ });
+ workspace.update_in(cx, |_, window, cx| {
+ window.dispatch_action(menu::Cancel.boxed_clone(), cx);
+ });
+
+ // Verify navigation state before clear
+ workspace.update(cx, |workspace, cx| {
+ let pane = workspace.active_pane();
+ pane.read(cx).can_navigate_backward()
+ });
+
+ // Clear navigation history
+ cx.dispatch_action(workspace::ClearNavigationHistory);
+
+ // Verify that navigation is disabled immediately after clear
+ workspace.update(cx, |workspace, cx| {
+ let pane = workspace.active_pane();
+ assert!(
+ !pane.read(cx).can_navigate_backward(),
+ "Should not be able to navigate backward after clearing history"
+ );
+ assert!(
+ !pane.read(cx).can_navigate_forward(),
+ "Should not be able to navigate forward after clearing history"
+ );
+ });
+
+ // Verify that file finder no longer shows history items
+ let picker = open_file_picker(&workspace, cx);
+ cx.simulate_input("fir");
+ picker.update(cx, |finder, _| {
+ let matches = collect_search_matches(finder);
+ assert!(
+ matches.history.is_empty(),
+ "File finder should not show history items after clearing"
+ );
+ });
+ workspace.update_in(cx, |_, window, cx| {
+ window.dispatch_action(menu::Cancel.boxed_clone(), cx);
+ });
+
+ // Verify history is empty by opening a new file
+ // (this should not show any previous history)
+ let history_after_clear =
+ open_close_queried_buffer("sec", 1, "second.rs", &workspace, cx).await;
+ assert_eq!(
+ history_after_clear.len(),
+ 0,
+ "Should have no history items after clearing"
+ );
+}
@@ -399,7 +399,12 @@ impl PickerDelegate for OpenPathDelegate {
}
})
.unwrap_or(false);
- if should_prepend_with_current_dir {
+
+ let current_dir_in_new_entries = new_entries
+ .iter()
+ .any(|entry| &entry.path.string == current_dir);
+
+ if should_prepend_with_current_dir && !current_dir_in_new_entries {
new_entries.insert(
0,
CandidateInfo {
@@ -272,7 +272,7 @@ impl GitRepository for FakeGitRepository {
.ok()
.map(|content| String::from_utf8(content).unwrap())?;
let repo_path = RelPath::new(repo_path, PathStyle::local()).ok()?;
- Some((repo_path.into(), (content, is_ignored)))
+ Some((RepoPath::from_rel_path(&repo_path), (content, is_ignored)))
})
.collect();
@@ -407,7 +407,11 @@ impl GitRepository for FakeGitRepository {
})
}
- fn create_branch(&self, name: String) -> BoxFuture<'_, Result<()>> {
+ fn create_branch(
+ &self,
+ name: String,
+ _base_branch: Option<String>,
+ ) -> BoxFuture<'_, Result<()>> {
self.with_state_async(true, move |state| {
state.branches.insert(name);
Ok(())
@@ -432,7 +436,7 @@ impl GitRepository for FakeGitRepository {
state
.blames
.get(&path)
- .with_context(|| format!("failed to get blame for {:?}", path.0))
+ .with_context(|| format!("failed to get blame for {:?}", path))
.cloned()
})
}
@@ -522,6 +526,7 @@ impl GitRepository for FakeGitRepository {
_message: gpui::SharedString,
_name_and_email: Option<(gpui::SharedString, gpui::SharedString)>,
_options: CommitOptions,
+ _askpass: AskPassDelegate,
_env: Arc<HashMap<String, String>>,
) -> BoxFuture<'_, Result<()>> {
unimplemented!()
@@ -4,6 +4,10 @@ mod mac_watcher;
#[cfg(not(target_os = "macos"))]
pub mod fs_watcher;
+use parking_lot::Mutex;
+use std::sync::atomic::{AtomicUsize, Ordering};
+use std::time::Instant;
+
use anyhow::{Context as _, Result, anyhow};
#[cfg(any(target_os = "linux", target_os = "freebsd"))]
use ashpd::desktop::trash;
@@ -12,6 +16,7 @@ use gpui::App;
use gpui::BackgroundExecutor;
use gpui::Global;
use gpui::ReadGlobal as _;
+use gpui::SharedString;
use std::borrow::Cow;
use util::command::new_smol_command;
@@ -51,8 +56,7 @@ use git::{
repository::{RepoPath, repo_path},
status::{FileStatus, StatusCode, TrackedStatus, UnmergedStatus},
};
-#[cfg(any(test, feature = "test-support"))]
-use parking_lot::Mutex;
+
#[cfg(any(test, feature = "test-support"))]
use smol::io::AsyncReadExt;
#[cfg(any(test, feature = "test-support"))]
@@ -148,6 +152,7 @@ pub trait Fs: Send + Sync {
async fn git_clone(&self, repo_url: &str, abs_work_directory: &Path) -> Result<()>;
fn is_fake(&self) -> bool;
async fn is_case_sensitive(&self) -> Result<bool>;
+ fn subscribe_to_jobs(&self) -> JobEventReceiver;
#[cfg(any(test, feature = "test-support"))]
fn as_fake(&self) -> Arc<FakeFs> {
@@ -215,6 +220,55 @@ pub struct Metadata {
#[serde(transparent)]
pub struct MTime(SystemTime);
+pub type JobId = usize;
+
+#[derive(Clone, Debug)]
+pub struct JobInfo {
+ pub start: Instant,
+ pub message: SharedString,
+ pub id: JobId,
+}
+
+#[derive(Debug, Clone)]
+pub enum JobEvent {
+ Started { info: JobInfo },
+ Completed { id: JobId },
+}
+
+pub type JobEventSender = futures::channel::mpsc::UnboundedSender<JobEvent>;
+pub type JobEventReceiver = futures::channel::mpsc::UnboundedReceiver<JobEvent>;
+
+struct JobTracker {
+ id: JobId,
+ subscribers: Arc<Mutex<Vec<JobEventSender>>>,
+}
+
+impl JobTracker {
+ fn new(info: JobInfo, subscribers: Arc<Mutex<Vec<JobEventSender>>>) -> Self {
+ let id = info.id;
+ {
+ let mut subs = subscribers.lock();
+ subs.retain(|sender| {
+ sender
+ .unbounded_send(JobEvent::Started { info: info.clone() })
+ .is_ok()
+ });
+ }
+ Self { id, subscribers }
+ }
+}
+
+impl Drop for JobTracker {
+ fn drop(&mut self) {
+ let mut subs = self.subscribers.lock();
+ subs.retain(|sender| {
+ sender
+ .unbounded_send(JobEvent::Completed { id: self.id })
+ .is_ok()
+ });
+ }
+}
+
impl MTime {
/// Conversion intended for persistence and testing.
pub fn from_seconds_and_nanos(secs: u64, nanos: u32) -> Self {
@@ -257,6 +311,8 @@ impl From<MTime> for proto::Timestamp {
pub struct RealFs {
bundled_git_binary_path: Option<PathBuf>,
executor: BackgroundExecutor,
+ next_job_id: Arc<AtomicUsize>,
+ job_event_subscribers: Arc<Mutex<Vec<JobEventSender>>>,
}
pub trait FileHandle: Send + Sync + std::fmt::Debug {
@@ -361,6 +417,8 @@ impl RealFs {
Self {
bundled_git_binary_path: git_binary_path,
executor,
+ next_job_id: Arc::new(AtomicUsize::new(0)),
+ job_event_subscribers: Arc::new(Mutex::new(Vec::new())),
}
}
}
@@ -719,9 +777,8 @@ impl Fs for RealFs {
{
Ok(metadata) => metadata,
Err(err) => {
- return match (err.kind(), err.raw_os_error()) {
- (io::ErrorKind::NotFound, _) => Ok(None),
- (io::ErrorKind::Other, Some(libc::ENOTDIR)) => Ok(None),
+ return match err.kind() {
+ io::ErrorKind::NotFound | io::ErrorKind::NotADirectory => Ok(None),
_ => Err(anyhow::Error::new(err)),
};
}
@@ -863,7 +920,6 @@ impl Fs for RealFs {
Pin<Box<dyn Send + Stream<Item = Vec<PathEvent>>>>,
Arc<dyn Watcher>,
) {
- use parking_lot::Mutex;
use util::{ResultExt as _, paths::SanitizedPath};
let (tx, rx) = smol::channel::unbounded();
@@ -960,6 +1016,15 @@ impl Fs for RealFs {
}
async fn git_clone(&self, repo_url: &str, abs_work_directory: &Path) -> Result<()> {
+ let job_id = self.next_job_id.fetch_add(1, Ordering::SeqCst);
+ let job_info = JobInfo {
+ id: job_id,
+ start: Instant::now(),
+ message: SharedString::from(format!("Cloning {}", repo_url)),
+ };
+
+ let _job_tracker = JobTracker::new(job_info, self.job_event_subscribers.clone());
+
let output = new_smol_command("git")
.current_dir(abs_work_directory)
.args(&["clone", repo_url])
@@ -980,6 +1045,12 @@ impl Fs for RealFs {
false
}
+ fn subscribe_to_jobs(&self) -> JobEventReceiver {
+ let (sender, receiver) = futures::channel::mpsc::unbounded();
+ self.job_event_subscribers.lock().push(sender);
+ receiver
+ }
+
/// Checks whether the file system is case sensitive by attempting to create two files
/// that have the same name except for the casing.
///
@@ -1050,6 +1121,7 @@ struct FakeFsState {
read_dir_call_count: usize,
path_write_counts: std::collections::HashMap<PathBuf, usize>,
moves: std::collections::HashMap<u64, PathBuf>,
+ job_event_subscribers: Arc<Mutex<Vec<JobEventSender>>>,
}
#[cfg(any(test, feature = "test-support"))]
@@ -1334,6 +1406,7 @@ impl FakeFs {
metadata_call_count: 0,
path_write_counts: Default::default(),
moves: Default::default(),
+ job_event_subscribers: Arc::new(Mutex::new(Vec::new())),
})),
});
@@ -1792,7 +1865,8 @@ impl FakeFs {
for (path, content) in workdir_contents {
use util::{paths::PathStyle, rel_path::RelPath};
- let repo_path: RepoPath = RelPath::new(path.strip_prefix(&workdir_path).unwrap(), PathStyle::local()).unwrap().into();
+ let repo_path = RelPath::new(path.strip_prefix(&workdir_path).unwrap(), PathStyle::local()).unwrap();
+ let repo_path = RepoPath::from_rel_path(&repo_path);
let status = statuses
.iter()
.find_map(|(p, status)| (*p == repo_path.as_unix_str()).then_some(status));
@@ -2587,6 +2661,12 @@ impl Fs for FakeFs {
Ok(true)
}
+ fn subscribe_to_jobs(&self) -> JobEventReceiver {
+ let (sender, receiver) = futures::channel::mpsc::unbounded();
+ self.state.lock().job_event_subscribers.lock().push(sender);
+ receiver
+ }
+
#[cfg(any(test, feature = "test-support"))]
fn as_fake(&self) -> Arc<FakeFs> {
self.this.upgrade().unwrap()
@@ -3201,6 +3281,8 @@ mod tests {
let fs = RealFs {
bundled_git_binary_path: None,
executor,
+ next_job_id: Arc::new(AtomicUsize::new(0)),
+ job_event_subscribers: Arc::new(Mutex::new(Vec::new())),
};
let temp_dir = TempDir::new().unwrap();
let file_to_be_replaced = temp_dir.path().join("file.txt");
@@ -3219,6 +3301,8 @@ mod tests {
let fs = RealFs {
bundled_git_binary_path: None,
executor,
+ next_job_id: Arc::new(AtomicUsize::new(0)),
+ job_event_subscribers: Arc::new(Mutex::new(Vec::new())),
};
let temp_dir = TempDir::new().unwrap();
let file_to_be_replaced = temp_dir.path().join("file.txt");
@@ -14,7 +14,6 @@ use rope::Rope;
use schemars::JsonSchema;
use serde::Deserialize;
use smol::io::{AsyncBufReadExt, AsyncReadExt, BufReader};
-use std::borrow::Cow;
use std::ffi::{OsStr, OsString};
use std::process::{ExitStatus, Stdio};
use std::{
@@ -431,7 +430,8 @@ pub trait GitRepository: Send + Sync {
fn branches(&self) -> BoxFuture<'_, Result<Vec<Branch>>>;
fn change_branch(&self, name: String) -> BoxFuture<'_, Result<()>>;
- fn create_branch(&self, name: String) -> BoxFuture<'_, Result<()>>;
+ fn create_branch(&self, name: String, base_branch: Option<String>)
+ -> BoxFuture<'_, Result<()>>;
fn rename_branch(&self, branch: String, new_name: String) -> BoxFuture<'_, Result<()>>;
fn worktrees(&self) -> BoxFuture<'_, Result<Vec<Worktree>>>;
@@ -490,6 +490,7 @@ pub trait GitRepository: Send + Sync {
message: SharedString,
name_and_email: Option<(SharedString, SharedString)>,
options: CommitOptions,
+ askpass: AskPassDelegate,
env: Arc<HashMap<String, String>>,
) -> BoxFuture<'_, Result<()>>;
@@ -846,7 +847,7 @@ impl GitRepository for RealGitRepository {
}
files.push(CommitFile {
- path: rel_path.into(),
+ path: RepoPath(Arc::from(rel_path)),
old_text,
new_text,
})
@@ -1358,14 +1359,28 @@ impl GitRepository for RealGitRepository {
.boxed()
}
- fn create_branch(&self, name: String) -> BoxFuture<'_, Result<()>> {
- let repo = self.repository.clone();
+ fn create_branch(
+ &self,
+ name: String,
+ base_branch: Option<String>,
+ ) -> BoxFuture<'_, Result<()>> {
+ let git_binary_path = self.any_git_binary_path.clone();
+ let working_directory = self.working_directory();
+ let executor = self.executor.clone();
+
self.executor
.spawn(async move {
- let repo = repo.lock();
- let current_commit = repo.head()?.peel_to_commit()?;
- repo.branch(&name, ¤t_commit, false)?;
- Ok(())
+ let mut args = vec!["switch", "-c", &name];
+ let base_branch_str;
+ if let Some(ref base) = base_branch {
+ base_branch_str = base.clone();
+ args.push(&base_branch_str);
+ }
+
+ GitBinary::new(git_binary_path, working_directory?, executor)
+ .run(&args)
+ .await?;
+ anyhow::Ok(())
})
.boxed()
}
@@ -1615,41 +1630,39 @@ impl GitRepository for RealGitRepository {
message: SharedString,
name_and_email: Option<(SharedString, SharedString)>,
options: CommitOptions,
+ ask_pass: AskPassDelegate,
env: Arc<HashMap<String, String>>,
) -> BoxFuture<'_, Result<()>> {
let working_directory = self.working_directory();
let git_binary_path = self.any_git_binary_path.clone();
- self.executor
- .spawn(async move {
- let mut cmd = new_smol_command(git_binary_path);
- cmd.current_dir(&working_directory?)
- .envs(env.iter())
- .args(["commit", "--quiet", "-m"])
- .arg(&message.to_string())
- .arg("--cleanup=strip");
+ let executor = self.executor.clone();
+ async move {
+ let mut cmd = new_smol_command(git_binary_path);
+ cmd.current_dir(&working_directory?)
+ .envs(env.iter())
+ .args(["commit", "--quiet", "-m"])
+ .arg(&message.to_string())
+ .arg("--cleanup=strip")
+ .stdout(smol::process::Stdio::piped())
+ .stderr(smol::process::Stdio::piped());
- if options.amend {
- cmd.arg("--amend");
- }
+ if options.amend {
+ cmd.arg("--amend");
+ }
- if options.signoff {
- cmd.arg("--signoff");
- }
+ if options.signoff {
+ cmd.arg("--signoff");
+ }
- if let Some((name, email)) = name_and_email {
- cmd.arg("--author").arg(&format!("{name} <{email}>"));
- }
+ if let Some((name, email)) = name_and_email {
+ cmd.arg("--author").arg(&format!("{name} <{email}>"));
+ }
- let output = cmd.output().await?;
+ run_git_command(env, ask_pass, cmd, &executor).await?;
- anyhow::ensure!(
- output.status.success(),
- "Failed to commit:\n{}",
- String::from_utf8_lossy(&output.stderr)
- );
- Ok(())
- })
- .boxed()
+ Ok(())
+ }
+ .boxed()
}
fn push(
@@ -2035,6 +2048,11 @@ fn git_status_args(path_prefixes: &[RepoPath]) -> Vec<OsString> {
OsString::from("--no-renames"),
OsString::from("-z"),
];
+ args.extend(
+ path_prefixes
+ .iter()
+ .map(|path_prefix| path_prefix.as_std_path().into()),
+ );
args.extend(path_prefixes.iter().map(|path_prefix| {
if path_prefix.is_empty() {
Path::new(".").into()
@@ -2290,52 +2308,54 @@ async fn run_askpass_command(
}
}
-#[derive(Clone, Debug, Ord, Hash, PartialOrd, Eq, PartialEq)]
-pub struct RepoPath(pub Arc<RelPath>);
+#[derive(Clone, Ord, Hash, PartialOrd, Eq, PartialEq)]
+pub struct RepoPath(Arc<RelPath>);
+
+impl std::fmt::Debug for RepoPath {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ self.0.fmt(f)
+ }
+}
impl RepoPath {
pub fn new<S: AsRef<str> + ?Sized>(s: &S) -> Result<Self> {
let rel_path = RelPath::unix(s.as_ref())?;
- Ok(rel_path.into())
- }
-
- pub fn from_proto(proto: &str) -> Result<Self> {
- let rel_path = RelPath::from_proto(proto)?;
- Ok(rel_path.into())
+ Ok(Self::from_rel_path(rel_path))
}
pub fn from_std_path(path: &Path, path_style: PathStyle) -> Result<Self> {
let rel_path = RelPath::new(path, path_style)?;
- Ok(Self(rel_path.as_ref().into()))
+ Ok(Self::from_rel_path(&rel_path))
}
-}
-#[cfg(any(test, feature = "test-support"))]
-pub fn repo_path<S: AsRef<str> + ?Sized>(s: &S) -> RepoPath {
- RepoPath(RelPath::unix(s.as_ref()).unwrap().into())
-}
+ pub fn from_proto(proto: &str) -> Result<Self> {
+ let rel_path = RelPath::from_proto(proto)?;
+ Ok(Self(rel_path))
+ }
-impl From<&RelPath> for RepoPath {
- fn from(value: &RelPath) -> Self {
- RepoPath(value.into())
+ pub fn from_rel_path(path: &RelPath) -> RepoPath {
+ Self(Arc::from(path))
}
-}
-impl<'a> From<Cow<'a, RelPath>> for RepoPath {
- fn from(value: Cow<'a, RelPath>) -> Self {
- value.as_ref().into()
+ pub fn as_std_path(&self) -> &Path {
+ // git2 does not like empty paths and our RelPath infra turns `.` into ``
+ // so undo that here
+ if self.is_empty() {
+ Path::new(".")
+ } else {
+ self.0.as_std_path()
+ }
}
}
-impl From<Arc<RelPath>> for RepoPath {
- fn from(value: Arc<RelPath>) -> Self {
- RepoPath(value)
- }
+#[cfg(any(test, feature = "test-support"))]
+pub fn repo_path<S: AsRef<str> + ?Sized>(s: &S) -> RepoPath {
+ RepoPath(RelPath::unix(s.as_ref()).unwrap().into())
}
-impl Default for RepoPath {
- fn default() -> Self {
- RepoPath(RelPath::empty().into())
+impl AsRef<Arc<RelPath>> for RepoPath {
+ fn as_ref(&self) -> &Arc<RelPath> {
+ &self.0
}
}
@@ -2347,12 +2367,6 @@ impl std::ops::Deref for RepoPath {
}
}
-// impl AsRef<Path> for RepoPath {
-// fn as_ref(&self) -> &Path {
-// RelPath::as_ref(&self.0)
-// }
-// }
-
#[derive(Debug)]
pub struct RepoPathDescendants<'a>(pub &'a RepoPath);
@@ -2454,8 +2468,17 @@ mod tests {
use super::*;
use gpui::TestAppContext;
+ fn disable_git_global_config() {
+ unsafe {
+ std::env::set_var("GIT_CONFIG_GLOBAL", "");
+ std::env::set_var("GIT_CONFIG_SYSTEM", "");
+ }
+ }
+
#[gpui::test]
async fn test_checkpoint_basic(cx: &mut TestAppContext) {
+ disable_git_global_config();
+
cx.executor().allow_parking();
let repo_dir = tempfile::tempdir().unwrap();
@@ -2471,6 +2494,7 @@ mod tests {
cx.executor(),
)
.unwrap();
+
repo.stage_paths(vec![repo_path("file")], Arc::new(HashMap::default()))
.await
.unwrap();
@@ -2478,6 +2502,7 @@ mod tests {
"Initial commit".into(),
None,
CommitOptions::default(),
+ AskPassDelegate::new(&mut cx.to_async(), |_, _, _| {}),
Arc::new(checkpoint_author_envs()),
)
.await
@@ -2504,6 +2529,7 @@ mod tests {
"Commit after checkpoint".into(),
None,
CommitOptions::default(),
+ AskPassDelegate::new(&mut cx.to_async(), |_, _, _| {}),
Arc::new(checkpoint_author_envs()),
)
.await
@@ -2541,6 +2567,8 @@ mod tests {
#[gpui::test]
async fn test_checkpoint_empty_repo(cx: &mut TestAppContext) {
+ disable_git_global_config();
+
cx.executor().allow_parking();
let repo_dir = tempfile::tempdir().unwrap();
@@ -2585,6 +2613,8 @@ mod tests {
#[gpui::test]
async fn test_compare_checkpoints(cx: &mut TestAppContext) {
+ disable_git_global_config();
+
cx.executor().allow_parking();
let repo_dir = tempfile::tempdir().unwrap();
@@ -2624,6 +2654,8 @@ mod tests {
#[gpui::test]
async fn test_checkpoint_exclude_binary_files(cx: &mut TestAppContext) {
+ disable_git_global_config();
+
cx.executor().allow_parking();
let repo_dir = tempfile::tempdir().unwrap();
@@ -2654,6 +2686,7 @@ mod tests {
"Initial commit".into(),
None,
CommitOptions::default(),
+ AskPassDelegate::new(&mut cx.to_async(), |_, _, _| {}),
Arc::new(checkpoint_author_envs()),
)
.await
@@ -454,7 +454,7 @@ impl FromStr for GitStatus {
let status = entry.as_bytes()[0..2].try_into().unwrap();
let status = FileStatus::from_bytes(status).log_err()?;
// git-status outputs `/`-delimited repo paths, even on Windows.
- let path = RepoPath(RelPath::unix(path).log_err()?.into());
+ let path = RepoPath::from_rel_path(RelPath::unix(path).log_err()?);
Some((path, status))
})
.collect::<Vec<_>>();
@@ -539,7 +539,7 @@ impl FromStr for TreeDiff {
let mut fields = s.split('\0');
let mut parsed = HashMap::default();
while let Some((status, path)) = fields.next().zip(fields.next()) {
- let path = RepoPath(RelPath::unix(path)?.into());
+ let path = RepoPath::from_rel_path(RelPath::unix(path)?);
let mut fields = status.split(" ").skip(2);
let old_sha = fields
@@ -49,6 +49,8 @@ pub fn register_additional_providers(
provider_registry.register_hosting_provider(Arc::new(forgejo_self_hosted));
} else if let Ok(gitea_self_hosted) = Gitea::from_remote_url(&origin_url) {
provider_registry.register_hosting_provider(Arc::new(gitea_self_hosted));
+ } else if let Ok(bitbucket_self_hosted) = Bitbucket::from_remote_url(&origin_url) {
+ provider_registry.register_hosting_provider(Arc::new(bitbucket_self_hosted));
}
}
@@ -1,6 +1,7 @@
use std::str::FromStr;
use std::sync::LazyLock;
+use anyhow::{Result, bail};
use regex::Regex;
use url::Url;
@@ -9,6 +10,8 @@ use git::{
PullRequest, RemoteUrl,
};
+use crate::get_host_from_git_remote_url;
+
fn pull_request_regex() -> &'static Regex {
static PULL_REQUEST_REGEX: LazyLock<Regex> = LazyLock::new(|| {
// This matches Bitbucket PR reference pattern: (pull request #xxx)
@@ -33,6 +36,31 @@ impl Bitbucket {
pub fn public_instance() -> Self {
Self::new("Bitbucket", Url::parse("https://bitbucket.org").unwrap())
}
+
+ pub fn from_remote_url(remote_url: &str) -> Result<Self> {
+ let host = get_host_from_git_remote_url(remote_url)?;
+ if host == "bitbucket.org" {
+ bail!("the BitBucket instance is not self-hosted");
+ }
+
+ // TODO: detecting self hosted instances by checking whether "bitbucket" is in the url or not
+ // is not very reliable. See https://github.com/zed-industries/zed/issues/26393 for more
+ // information.
+ if !host.contains("bitbucket") {
+ bail!("not a BitBucket URL");
+ }
+
+ Ok(Self::new(
+ "BitBucket Self-Hosted",
+ Url::parse(&format!("https://{}", host))?,
+ ))
+ }
+
+ fn is_self_hosted(&self) -> bool {
+ self.base_url
+ .host_str()
+ .is_some_and(|host| host != "bitbucket.org")
+ }
}
impl GitHostingProvider for Bitbucket {
@@ -49,10 +77,16 @@ impl GitHostingProvider for Bitbucket {
}
fn format_line_number(&self, line: u32) -> String {
+ if self.is_self_hosted() {
+ return format!("{line}");
+ }
format!("lines-{line}")
}
fn format_line_numbers(&self, start_line: u32, end_line: u32) -> String {
+ if self.is_self_hosted() {
+ return format!("{start_line}-{end_line}");
+ }
format!("lines-{start_line}:{end_line}")
}
@@ -60,7 +94,7 @@ impl GitHostingProvider for Bitbucket {
let url = RemoteUrl::from_str(url).ok()?;
let host = url.host_str()?;
- if host != "bitbucket.org" {
+ if host != self.base_url.host_str()? {
return None;
}
@@ -81,7 +115,12 @@ impl GitHostingProvider for Bitbucket {
) -> Url {
let BuildCommitPermalinkParams { sha } = params;
let ParsedGitRemote { owner, repo } = remote;
-
+ if self.is_self_hosted() {
+ return self
+ .base_url()
+ .join(&format!("projects/{owner}/repos/{repo}/commits/{sha}"))
+ .unwrap();
+ }
self.base_url()
.join(&format!("{owner}/{repo}/commits/{sha}"))
.unwrap()
@@ -95,10 +134,18 @@ impl GitHostingProvider for Bitbucket {
selection,
} = params;
- let mut permalink = self
- .base_url()
- .join(&format!("{owner}/{repo}/src/{sha}/{path}"))
- .unwrap();
+ let mut permalink = if self.is_self_hosted() {
+ self.base_url()
+ .join(&format!(
+ "projects/{owner}/repos/{repo}/browse/{path}?at={sha}"
+ ))
+ .unwrap()
+ } else {
+ self.base_url()
+ .join(&format!("{owner}/{repo}/src/{sha}/{path}"))
+ .unwrap()
+ };
+
permalink.set_fragment(
selection
.map(|selection| self.line_fragment(&selection))
@@ -117,7 +164,14 @@ impl GitHostingProvider for Bitbucket {
// Construct the PR URL in Bitbucket format
let mut url = self.base_url();
- let path = format!("/{}/{}/pull-requests/{}", remote.owner, remote.repo, number);
+ let path = if self.is_self_hosted() {
+ format!(
+ "/projects/{}/repos/{}/pull-requests/{}",
+ remote.owner, remote.repo, number
+ )
+ } else {
+ format!("/{}/{}/pull-requests/{}", remote.owner, remote.repo, number)
+ };
url.set_path(&path);
Some(PullRequest { number, url })
@@ -176,6 +230,60 @@ mod tests {
);
}
+ #[test]
+ fn test_parse_remote_url_given_self_hosted_ssh_url() {
+ let remote_url = "git@bitbucket.company.com:zed-industries/zed.git";
+
+ let parsed_remote = Bitbucket::from_remote_url(remote_url)
+ .unwrap()
+ .parse_remote_url(remote_url)
+ .unwrap();
+
+ assert_eq!(
+ parsed_remote,
+ ParsedGitRemote {
+ owner: "zed-industries".into(),
+ repo: "zed".into(),
+ }
+ );
+ }
+
+ #[test]
+ fn test_parse_remote_url_given_self_hosted_https_url() {
+ let remote_url = "https://bitbucket.company.com/zed-industries/zed.git";
+
+ let parsed_remote = Bitbucket::from_remote_url(remote_url)
+ .unwrap()
+ .parse_remote_url(remote_url)
+ .unwrap();
+
+ assert_eq!(
+ parsed_remote,
+ ParsedGitRemote {
+ owner: "zed-industries".into(),
+ repo: "zed".into(),
+ }
+ );
+ }
+
+ #[test]
+ fn test_parse_remote_url_given_self_hosted_https_url_with_username() {
+ let remote_url = "https://thorstenballzed@bitbucket.company.com/zed-industries/zed.git";
+
+ let parsed_remote = Bitbucket::from_remote_url(remote_url)
+ .unwrap()
+ .parse_remote_url(remote_url)
+ .unwrap();
+
+ assert_eq!(
+ parsed_remote,
+ ParsedGitRemote {
+ owner: "zed-industries".into(),
+ repo: "zed".into(),
+ }
+ );
+ }
+
#[test]
fn test_build_bitbucket_permalink() {
let permalink = Bitbucket::public_instance().build_permalink(
@@ -190,6 +298,23 @@ mod tests {
assert_eq!(permalink.to_string(), expected_url.to_string())
}
+ #[test]
+ fn test_build_bitbucket_self_hosted_permalink() {
+ let permalink =
+ Bitbucket::from_remote_url("git@bitbucket.company.com:zed-industries/zed.git")
+ .unwrap()
+ .build_permalink(
+ ParsedGitRemote {
+ owner: "zed-industries".into(),
+ repo: "zed".into(),
+ },
+ BuildPermalinkParams::new("f00b4r", &repo_path("main.rs"), None),
+ );
+
+ let expected_url = "https://bitbucket.company.com/projects/zed-industries/repos/zed/browse/main.rs?at=f00b4r";
+ assert_eq!(permalink.to_string(), expected_url.to_string())
+ }
+
#[test]
fn test_build_bitbucket_permalink_with_single_line_selection() {
let permalink = Bitbucket::public_instance().build_permalink(
@@ -204,6 +329,23 @@ mod tests {
assert_eq!(permalink.to_string(), expected_url.to_string())
}
+ #[test]
+ fn test_build_bitbucket_self_hosted_permalink_with_single_line_selection() {
+ let permalink =
+ Bitbucket::from_remote_url("https://bitbucket.company.com/zed-industries/zed.git")
+ .unwrap()
+ .build_permalink(
+ ParsedGitRemote {
+ owner: "zed-industries".into(),
+ repo: "zed".into(),
+ },
+ BuildPermalinkParams::new("f00b4r", &repo_path("main.rs"), Some(6..6)),
+ );
+
+ let expected_url = "https://bitbucket.company.com/projects/zed-industries/repos/zed/browse/main.rs?at=f00b4r#7";
+ assert_eq!(permalink.to_string(), expected_url.to_string())
+ }
+
#[test]
fn test_build_bitbucket_permalink_with_multi_line_selection() {
let permalink = Bitbucket::public_instance().build_permalink(
@@ -219,6 +361,23 @@ mod tests {
assert_eq!(permalink.to_string(), expected_url.to_string())
}
+ #[test]
+ fn test_build_bitbucket_self_hosted_permalink_with_multi_line_selection() {
+ let permalink =
+ Bitbucket::from_remote_url("git@bitbucket.company.com:zed-industries/zed.git")
+ .unwrap()
+ .build_permalink(
+ ParsedGitRemote {
+ owner: "zed-industries".into(),
+ repo: "zed".into(),
+ },
+ BuildPermalinkParams::new("f00b4r", &repo_path("main.rs"), Some(23..47)),
+ );
+
+ let expected_url = "https://bitbucket.company.com/projects/zed-industries/repos/zed/browse/main.rs?at=f00b4r#24-48";
+ assert_eq!(permalink.to_string(), expected_url.to_string())
+ }
+
#[test]
fn test_bitbucket_pull_requests() {
use indoc::indoc;
@@ -248,4 +407,36 @@ mod tests {
"https://bitbucket.org/zed-industries/zed/pull-requests/123"
);
}
+
+ #[test]
+ fn test_bitbucket_self_hosted_pull_requests() {
+ use indoc::indoc;
+
+ let remote = ParsedGitRemote {
+ owner: "zed-industries".into(),
+ repo: "zed".into(),
+ };
+
+ let bitbucket =
+ Bitbucket::from_remote_url("https://bitbucket.company.com/zed-industries/zed.git")
+ .unwrap();
+
+ // Test message without PR reference
+ let message = "This does not contain a pull request";
+ assert!(bitbucket.extract_pull_request(&remote, message).is_none());
+
+ // Pull request number at end of first line
+ let message = indoc! {r#"
+ Merged in feature-branch (pull request #123)
+
+ Some detailed description of the changes.
+ "#};
+
+ let pr = bitbucket.extract_pull_request(&remote, message).unwrap();
+ assert_eq!(pr.number, 123);
+ assert_eq!(
+ pr.url.as_str(),
+ "https://bitbucket.company.com/projects/zed-industries/repos/zed/pull-requests/123"
+ );
+ }
}
@@ -241,18 +241,10 @@ impl BranchListDelegate {
return;
};
let new_branch_name = new_branch_name.to_string().replace(' ', "-");
+ let base_branch = from_branch.map(|b| b.to_string());
cx.spawn(async move |_, cx| {
- if let Some(based_branch) = from_branch {
- repo.update(cx, |repo, _| repo.change_branch(based_branch.to_string()))?
- .await??;
- }
-
- repo.update(cx, |repo, _| {
- repo.create_branch(new_branch_name.to_string())
- })?
- .await??;
repo.update(cx, |repo, _| {
- repo.change_branch(new_branch_name.to_string())
+ repo.create_branch(new_branch_name, base_branch)
})?
.await??;
@@ -266,7 +266,7 @@ impl language::File for GitBlob {
}
fn path(&self) -> &Arc<RelPath> {
- &self.path.0
+ self.path.as_ref()
}
fn full_path(&self, _: &App) -> PathBuf {
@@ -879,7 +879,7 @@ impl GitPanel {
let active_repository = self.active_repository.as_ref()?.downgrade();
cx.spawn(async move |_, cx| {
- let file_path_str = repo_path.0.display(PathStyle::Posix);
+ let file_path_str = repo_path.as_ref().display(PathStyle::Posix);
let repo_root = active_repository.read_with(cx, |repository, _| {
repository.snapshot().work_directory_abs_path
@@ -1074,7 +1074,7 @@ impl GitPanel {
}
let mut details = entries
.iter()
- .filter_map(|entry| entry.repo_path.0.file_name())
+ .filter_map(|entry| entry.repo_path.as_ref().file_name())
.map(|filename| filename.to_string())
.take(5)
.join("\n");
@@ -1129,7 +1129,7 @@ impl GitPanel {
.map(|entry| {
entry
.repo_path
- .0
+ .as_ref()
.file_name()
.map(|f| f.to_string())
.unwrap_or_default()
@@ -1585,6 +1585,7 @@ impl GitPanel {
return;
}
+ let askpass = self.askpass_delegate("git commit", window, cx);
let commit_message = self.custom_or_suggested_commit_message(window, cx);
let Some(mut message) = commit_message else {
@@ -1599,7 +1600,7 @@ impl GitPanel {
let task = if self.has_staged_changes() {
// Repository serializes all git operations, so we can just send a commit immediately
let commit_task = active_repository.update(cx, |repo, cx| {
- repo.commit(message.into(), None, options, cx)
+ repo.commit(message.into(), None, options, askpass, cx)
});
cx.background_spawn(async move { commit_task.await? })
} else {
@@ -1621,7 +1622,7 @@ impl GitPanel {
cx.spawn(async move |_, cx| {
stage_task.await?;
let commit_task = active_repository.update(cx, |repo, cx| {
- repo.commit(message.into(), None, options, cx)
+ repo.commit(message.into(), None, options, askpass, cx)
})?;
commit_task.await?
})
@@ -5646,7 +5647,7 @@ mod tests {
assert_eq!(
entry.status_entry().map(|status| status
.repo_path
- .0
+ .as_ref()
.as_std_path()
.to_string_lossy()
.to_string()),
@@ -336,7 +336,7 @@ impl ProjectDiff {
};
let repo = git_repo.read(cx);
let sort_prefix = sort_prefix(repo, &entry.repo_path, entry.status, cx);
- let path_key = PathKey::with_sort_prefix(sort_prefix, entry.repo_path.0);
+ let path_key = PathKey::with_sort_prefix(sort_prefix, entry.repo_path.as_ref().clone());
self.move_to_path(path_key, window, cx)
}
@@ -566,7 +566,7 @@ impl ProjectDiff {
for entry in buffers_to_load.iter() {
let sort_prefix = sort_prefix(&repo, &entry.repo_path, entry.file_status, cx);
let path_key =
- PathKey::with_sort_prefix(sort_prefix, entry.repo_path.0.clone());
+ PathKey::with_sort_prefix(sort_prefix, entry.repo_path.as_ref().clone());
previous_paths.remove(&path_key);
path_keys.push(path_key)
}
@@ -169,6 +169,13 @@ impl Application {
self
}
+ /// Configures when the application should automatically quit.
+ /// By default, [`QuitMode::Default`] is used.
+ pub fn with_quit_mode(self, mode: QuitMode) -> Self {
+ self.0.borrow_mut().quit_mode = mode;
+ self
+ }
+
/// Start the application. The provided callback will be called once the
/// app is fully launched.
pub fn run<F>(self, on_finish_launching: F)
@@ -238,6 +245,18 @@ type WindowClosedHandler = Box<dyn FnMut(&mut App)>;
type ReleaseListener = Box<dyn FnOnce(&mut dyn Any, &mut App) + 'static>;
type NewEntityListener = Box<dyn FnMut(AnyEntity, &mut Option<&mut Window>, &mut App) + 'static>;
+/// Defines when the application should automatically quit.
+#[derive(Debug, Clone, Copy, PartialEq, Eq, Default)]
+pub enum QuitMode {
+ /// Use [`QuitMode::Explicit`] on macOS and [`QuitMode::LastWindowClosed`] on other platforms.
+ #[default]
+ Default,
+ /// Quit automatically when the last window is closed.
+ LastWindowClosed,
+ /// Quit only when requested via [`App::quit`].
+ Explicit,
+}
+
#[doc(hidden)]
#[derive(Clone, PartialEq, Eq)]
pub struct SystemWindowTab {
@@ -588,6 +607,7 @@ pub struct App {
pub(crate) inspector_element_registry: InspectorElementRegistry,
#[cfg(any(test, feature = "test-support", debug_assertions))]
pub(crate) name: Option<&'static str>,
+ quit_mode: QuitMode,
quitting: bool,
}
@@ -659,6 +679,7 @@ impl App {
inspector_renderer: None,
#[cfg(any(feature = "inspector", debug_assertions))]
inspector_element_registry: InspectorElementRegistry::default(),
+ quit_mode: QuitMode::default(),
quitting: false,
#[cfg(any(test, feature = "test-support", debug_assertions))]
@@ -1172,6 +1193,12 @@ impl App {
self.http_client = new_client;
}
+ /// Configures when the application should automatically quit.
+ /// By default, [`QuitMode::Default`] is used.
+ pub fn set_quit_mode(&mut self, mode: QuitMode) {
+ self.quit_mode = mode;
+ }
+
/// Returns the SVG renderer used by the application.
pub fn svg_renderer(&self) -> SvgRenderer {
self.svg_renderer.clone()
@@ -1379,6 +1406,16 @@ impl App {
callback(cx);
true
});
+
+ let quit_on_empty = match cx.quit_mode {
+ QuitMode::Explicit => false,
+ QuitMode::LastWindowClosed => true,
+ QuitMode::Default => !cfg!(macos),
+ };
+
+ if quit_on_empty && cx.windows.is_empty() {
+ cx.quit();
+ }
} else {
cx.windows.get_mut(id)?.replace(window);
}
@@ -10,7 +10,9 @@ use crate::{
use anyhow::{anyhow, bail};
use futures::{Stream, StreamExt, channel::oneshot};
use rand::{SeedableRng, rngs::StdRng};
-use std::{cell::RefCell, future::Future, ops::Deref, rc::Rc, sync::Arc, time::Duration};
+use std::{
+ cell::RefCell, future::Future, ops::Deref, path::PathBuf, rc::Rc, sync::Arc, time::Duration,
+};
/// A TestAppContext is provided to tests created with `#[gpui::test]`, it provides
/// an implementation of `Context` with additional methods that are useful in tests.
@@ -331,6 +333,13 @@ impl TestAppContext {
self.test_window(window_handle).simulate_resize(size);
}
+ /// Returns true if there's an alert dialog open.
+ pub fn expect_restart(&self) -> oneshot::Receiver<Option<PathBuf>> {
+ let (tx, rx) = futures::channel::oneshot::channel();
+ self.test_platform.expect_restart.borrow_mut().replace(tx);
+ rx
+ }
+
/// Causes the given sources to be returned if the application queries for screen
/// capture sources.
pub fn set_screen_capture_sources(&self, sources: Vec<TestScreenCaptureSource>) {
@@ -281,7 +281,11 @@ impl BackgroundExecutor {
});
let mut cx = std::task::Context::from_waker(&waker);
- let duration = Duration::from_secs(180);
+ let duration = Duration::from_secs(
+ option_env!("GPUI_TEST_TIMEOUT")
+ .and_then(|s| s.parse::<u64>().ok())
+ .unwrap_or(180),
+ );
let mut test_should_end_by = Instant::now() + duration;
loop {
@@ -387,9 +387,6 @@ impl WaylandClientStatePtr {
{
state.keyboard_focused_window = Some(window);
}
- if state.windows.is_empty() {
- state.common.signal.stop();
- }
}
}
@@ -246,10 +246,6 @@ impl X11ClientStatePtr {
state.keyboard_focused_window = None;
}
state.cursor_styles.remove(&x_window);
-
- if state.windows.is_empty() {
- state.common.signal.stop();
- }
}
pub fn update_ime_position(&self, bounds: Bounds<Pixels>) {
@@ -36,6 +36,7 @@ pub(crate) struct TestPlatform {
screen_capture_sources: RefCell<Vec<TestScreenCaptureSource>>,
pub opened_url: RefCell<Option<String>>,
pub text_system: Arc<dyn PlatformTextSystem>,
+ pub expect_restart: RefCell<Option<oneshot::Sender<Option<PathBuf>>>>,
#[cfg(target_os = "windows")]
bitmap_factory: std::mem::ManuallyDrop<IWICImagingFactory>,
weak: Weak<Self>,
@@ -112,6 +113,7 @@ impl TestPlatform {
active_cursor: Default::default(),
active_display: Rc::new(TestDisplay::new()),
active_window: Default::default(),
+ expect_restart: Default::default(),
current_clipboard_item: Mutex::new(None),
#[cfg(any(target_os = "linux", target_os = "freebsd"))]
current_primary_item: Mutex::new(None),
@@ -250,8 +252,10 @@ impl Platform for TestPlatform {
fn quit(&self) {}
- fn restart(&self, _: Option<PathBuf>) {
- //
+ fn restart(&self, path: Option<PathBuf>) {
+ if let Some(tx) = self.expect_restart.take() {
+ tx.send(path).unwrap();
+ }
}
fn activate(&self, _ignoring_other_apps: bool) {
@@ -487,14 +487,12 @@ impl WindowsWindowInner {
let scale_factor = lock.scale_factor;
let wheel_scroll_amount = match modifiers.shift {
true => {
- self.system_settings
- .borrow()
+ self.system_settings()
.mouse_wheel_settings
.wheel_scroll_chars
}
false => {
- self.system_settings
- .borrow()
+ self.system_settings()
.mouse_wheel_settings
.wheel_scroll_lines
}
@@ -541,8 +539,7 @@ impl WindowsWindowInner {
};
let scale_factor = lock.scale_factor;
let wheel_scroll_chars = self
- .system_settings
- .borrow()
+ .system_settings()
.mouse_wheel_settings
.wheel_scroll_chars;
drop(lock);
@@ -677,8 +674,7 @@ impl WindowsWindowInner {
// used by Chrome. However, it may result in one row of pixels being obscured
// in our client area. But as Chrome says, "there seems to be no better solution."
if is_maximized
- && let Some(ref taskbar_position) =
- self.system_settings.borrow().auto_hide_taskbar_position
+ && let Some(ref taskbar_position) = self.system_settings().auto_hide_taskbar_position
{
// For the auto-hide taskbar, adjust in by 1 pixel on taskbar edge,
// so the window isn't treated as a "fullscreen app", which would cause
@@ -1072,7 +1068,7 @@ impl WindowsWindowInner {
lock.border_offset.update(handle).log_err();
// system settings may emit a window message which wants to take the refcell lock, so drop it
drop(lock);
- self.system_settings.borrow_mut().update(display, wparam.0);
+ self.system_settings_mut().update(display, wparam.0);
} else {
self.handle_system_theme_changed(handle, lparam)?;
};
@@ -342,9 +342,8 @@ impl Platform for WindowsPlatform {
}
}
- if let Some(ref mut callback) = self.inner.state.borrow_mut().callbacks.quit {
- callback();
- }
+ self.inner
+ .with_callback(|callbacks| &mut callbacks.quit, |callback| callback());
}
fn quit(&self) {
@@ -578,14 +577,13 @@ impl Platform for WindowsPlatform {
fn set_cursor_style(&self, style: CursorStyle) {
let hcursor = load_cursor(style);
- let mut lock = self.inner.state.borrow_mut();
- if lock.current_cursor.map(|c| c.0) != hcursor.map(|c| c.0) {
+ if self.inner.state.borrow_mut().current_cursor.map(|c| c.0) != hcursor.map(|c| c.0) {
self.post_message(
WM_GPUI_CURSOR_STYLE_CHANGED,
WPARAM(0),
LPARAM(hcursor.map_or(0, |c| c.0 as isize)),
);
- lock.current_cursor = hcursor;
+ self.inner.state.borrow_mut().current_cursor = hcursor;
}
}
@@ -724,6 +722,18 @@ impl WindowsPlatformInner {
}))
}
+ /// Calls `project` to project to the corresponding callback field, removes it from callbacks, calls `f` with the callback and then puts the callback back.
+ fn with_callback<T>(
+ &self,
+ project: impl Fn(&mut PlatformCallbacks) -> &mut Option<T>,
+ f: impl FnOnce(&mut T),
+ ) {
+ if let Some(mut callback) = project(&mut self.state.borrow_mut().callbacks).take() {
+ f(&mut callback);
+ *project(&mut self.state.borrow_mut().callbacks) = Some(callback)
+ }
+ }
+
fn handle_msg(
self: &Rc<Self>,
handle: HWND,
@@ -753,9 +763,7 @@ impl WindowsPlatformInner {
}
match message {
WM_GPUI_CLOSE_ONE_WINDOW => {
- if self.close_one_window(HWND(lparam.0 as _)) {
- unsafe { PostQuitMessage(0) };
- }
+ self.close_one_window(HWND(lparam.0 as _));
Some(0)
}
WM_GPUI_TASK_DISPATCHED_ON_MAIN_THREAD => self.run_foreground_task(),
@@ -809,40 +817,36 @@ impl WindowsPlatformInner {
}
fn handle_dock_action_event(&self, action_idx: usize) -> Option<isize> {
- let mut lock = self.state.borrow_mut();
- let mut callback = lock.callbacks.app_menu_action.take()?;
- let Some(action) = lock
+ let Some(action) = self
+ .state
+ .borrow_mut()
.jump_list
.dock_menus
.get(action_idx)
.map(|dock_menu| dock_menu.action.boxed_clone())
else {
- lock.callbacks.app_menu_action = Some(callback);
log::error!("Dock menu for index {action_idx} not found");
return Some(1);
};
- drop(lock);
- callback(&*action);
- self.state.borrow_mut().callbacks.app_menu_action = Some(callback);
+ self.with_callback(
+ |callbacks| &mut callbacks.app_menu_action,
+ |callback| callback(&*action),
+ );
Some(0)
}
fn handle_keyboard_layout_change(&self) -> Option<isize> {
- let mut callback = self
- .state
- .borrow_mut()
- .callbacks
- .keyboard_layout_change
- .take()?;
- callback();
- self.state.borrow_mut().callbacks.keyboard_layout_change = Some(callback);
+ self.with_callback(
+ |callbacks| &mut callbacks.keyboard_layout_change,
+ |callback| callback(),
+ );
Some(0)
}
fn handle_device_lost(&self, lparam: LPARAM) -> Option<isize> {
- let mut lock = self.state.borrow_mut();
let directx_devices = lparam.0 as *const DirectXDevices;
let directx_devices = unsafe { &*directx_devices };
+ let mut lock = self.state.borrow_mut();
lock.directx_devices.take();
lock.directx_devices = Some(directx_devices.clone());
@@ -63,7 +63,7 @@ pub(crate) struct WindowsWindowInner {
hwnd: HWND,
drop_target_helper: IDropTargetHelper,
pub(crate) state: RefCell<WindowsWindowState>,
- pub(crate) system_settings: RefCell<WindowsSystemSettings>,
+ system_settings: RefCell<WindowsSystemSettings>,
pub(crate) handle: AnyWindowHandle,
pub(crate) hide_title_bar: bool,
pub(crate) is_movable: bool,
@@ -321,6 +321,14 @@ impl WindowsWindowInner {
}
Ok(())
}
+
+ pub(crate) fn system_settings(&self) -> std::cell::Ref<'_, WindowsSystemSettings> {
+ self.system_settings.borrow()
+ }
+
+ pub(crate) fn system_settings_mut(&self) -> std::cell::RefMut<'_, WindowsSystemSettings> {
+ self.system_settings.borrow_mut()
+ }
}
#[derive(Default)]
@@ -453,8 +461,9 @@ impl WindowsWindow {
// Failure to create a `WindowsWindowState` can cause window creation to fail,
// so check the inner result first.
- let this = context.inner.take().unwrap()?;
+ let this = context.inner.take().transpose()?;
let hwnd = creation_result?;
+ let this = this.unwrap();
register_drag_drop(&this)?;
configure_dwm_dark_mode(hwnd, appearance);
@@ -31,6 +31,7 @@ parking_lot.workspace = true
reqwest.workspace = true
serde.workspace = true
serde_json.workspace = true
+serde_urlencoded.workspace = true
sha2.workspace = true
tempfile.workspace = true
url.workspace = true
@@ -13,6 +13,7 @@ use futures::{
future::{self, BoxFuture},
};
use parking_lot::Mutex;
+use serde::Serialize;
#[cfg(feature = "test-support")]
use std::fmt;
use std::{any::type_name, sync::Arc};
@@ -255,7 +256,7 @@ impl HttpClientWithUrl {
}
/// Builds a Zed Cloud URL using the given path.
- pub fn build_zed_cloud_url(&self, path: &str, query: &[(&str, &str)]) -> Result<Url> {
+ pub fn build_zed_cloud_url(&self, path: &str) -> Result<Url> {
let base_url = self.base_url();
let base_api_url = match base_url.as_ref() {
"https://zed.dev" => "https://cloud.zed.dev",
@@ -264,10 +265,20 @@ impl HttpClientWithUrl {
other => other,
};
- Ok(Url::parse_with_params(
- &format!("{}{}", base_api_url, path),
- query,
- )?)
+ Ok(Url::parse(&format!("{}{}", base_api_url, path))?)
+ }
+
+ /// Builds a Zed Cloud URL using the given path and query params.
+ pub fn build_zed_cloud_url_with_query(&self, path: &str, query: impl Serialize) -> Result<Url> {
+ let base_url = self.base_url();
+ let base_api_url = match base_url.as_ref() {
+ "https://zed.dev" => "https://cloud.zed.dev",
+ "https://staging.zed.dev" => "https://cloud.zed.dev",
+ "http://localhost:3000" => "http://localhost:8787",
+ other => other,
+ };
+ let query = serde_urlencoded::to_string(&query)?;
+ Ok(Url::parse(&format!("{}{}?{}", base_api_url, path, query))?)
}
/// Builds a Zed LLM URL using the given path.
@@ -35,6 +35,7 @@ pub enum IconName {
ArrowUp,
ArrowUpRight,
Attach,
+ AtSign,
AudioOff,
AudioOn,
Backspace,
@@ -173,9 +173,15 @@ pub fn new_journal_entry(workspace: &Workspace, window: &mut Window, cx: &mut Ap
}
fn journal_dir(path: &str) -> Option<PathBuf> {
- shellexpand::full(path) //TODO handle this better
- .ok()
- .map(|dir| Path::new(&dir.to_string()).to_path_buf().join("journal"))
+ let expanded = shellexpand::full(path).ok()?;
+ let base_path = Path::new(expanded.as_ref());
+ let absolute_path = if base_path.is_absolute() {
+ base_path.to_path_buf()
+ } else {
+ log::warn!("Invalid journal path {path:?} (not absolute), falling back to home directory",);
+ std::env::home_dir()?
+ };
+ Some(absolute_path.join("journal"))
}
fn heading_entry(now: NaiveTime, hour_format: &HourFormat) -> String {
@@ -224,4 +230,65 @@ mod tests {
assert_eq!(actual_heading_entry, expected_heading_entry);
}
}
+
+ mod journal_dir_tests {
+ use super::super::*;
+
+ #[test]
+ #[cfg(target_family = "unix")]
+ fn test_absolute_unix_path() {
+ let result = journal_dir("/home/user");
+ assert!(result.is_some());
+ let path = result.unwrap();
+ assert!(path.is_absolute());
+ assert_eq!(path, PathBuf::from("/home/user/journal"));
+ }
+
+ #[test]
+ fn test_tilde_expansion() {
+ let result = journal_dir("~/documents");
+ assert!(result.is_some());
+ let path = result.unwrap();
+
+ assert!(path.is_absolute(), "Tilde should expand to absolute path");
+
+ if let Some(home) = std::env::home_dir() {
+ assert_eq!(path, home.join("documents").join("journal"));
+ }
+ }
+
+ #[test]
+ fn test_relative_path_falls_back_to_home() {
+ for relative_path in ["relative/path", "NONEXT/some/path", "../some/path"] {
+ let result = journal_dir(relative_path);
+ assert!(result.is_some(), "Failed for path: {}", relative_path);
+ let path = result.unwrap();
+
+ assert!(
+ path.is_absolute(),
+ "Path should be absolute for input '{}', got: {:?}",
+ relative_path,
+ path
+ );
+
+ if let Some(home) = std::env::home_dir() {
+ assert_eq!(
+ path,
+ home.join("journal"),
+ "Should fall back to home directory for input '{}'",
+ relative_path
+ );
+ }
+ }
+ }
+
+ #[test]
+ #[cfg(target_os = "windows")]
+ fn test_absolute_path_windows_style() {
+ let result = journal_dir("C:\\Users\\user\\Documents");
+ assert!(result.is_some());
+ let path = result.unwrap();
+ assert_eq!(path, PathBuf::from("C:\\Users\\user\\Documents\\journal"));
+ }
+ }
}
@@ -1030,22 +1030,22 @@
"$ref": "#"
},
"eslintConfig": {
- "$ref": "https://json.schemastore.org/eslintrc.json"
+ "$ref": "https://www.schemastore.org/eslintrc.json"
},
"prettier": {
- "$ref": "https://json.schemastore.org/prettierrc.json"
+ "$ref": "https://www.schemastore.org/prettierrc.json"
},
"stylelint": {
- "$ref": "https://json.schemastore.org/stylelintrc.json"
+ "$ref": "https://www.schemastore.org/stylelintrc.json"
},
"ava": {
- "$ref": "https://json.schemastore.org/ava.json"
+ "$ref": "https://www.schemastore.org/ava.json"
},
"release": {
- "$ref": "https://json.schemastore.org/semantic-release.json"
+ "$ref": "https://www.schemastore.org/semantic-release.json"
},
"jscpd": {
- "$ref": "https://json.schemastore.org/jscpd.json"
+ "$ref": "https://www.schemastore.org/jscpd.json"
},
"pnpm": {
"description": "Defines pnpm specific configuration.",
@@ -1305,5 +1305,5 @@
]
}
],
- "$id": "https://json.schemastore.org/package.json"
+ "$id": "https://www.schemastore.org/package.json"
}
@@ -1466,7 +1466,7 @@
}
}
},
- "id": "https://json.schemastore.org/tsconfig",
+ "id": "https://www.schemastore.org/tsconfig",
"title": "JSON schema for the TypeScript compiler's configuration file",
"type": "object"
}
@@ -2618,6 +2618,9 @@ pub fn rust_lang() -> Arc<Language> {
Some(tree_sitter_rust::LANGUAGE.into()),
)
.with_queries(LanguageQueries {
+ outline: Some(Cow::from(include_str!(
+ "../../languages/src/rust/outline.scm"
+ ))),
indents: Some(Cow::from(
r#"
[
@@ -381,10 +381,13 @@ impl OllamaLanguageModel {
thinking = Some(text)
}
MessageContent::ToolUse(tool_use) => {
- tool_calls.push(OllamaToolCall::Function(OllamaFunctionCall {
- name: tool_use.name.to_string(),
- arguments: tool_use.input,
- }));
+ tool_calls.push(OllamaToolCall {
+ id: Some(tool_use.id.to_string()),
+ function: OllamaFunctionCall {
+ name: tool_use.name.to_string(),
+ arguments: tool_use.input,
+ },
+ });
}
_ => (),
}
@@ -575,25 +578,23 @@ fn map_to_language_model_completion_events(
}
if let Some(tool_call) = tool_calls.and_then(|v| v.into_iter().next()) {
- match tool_call {
- OllamaToolCall::Function(function) => {
- let tool_id = format!(
- "{}-{}",
- &function.name,
- TOOL_CALL_COUNTER.fetch_add(1, Ordering::Relaxed)
- );
- let event =
- LanguageModelCompletionEvent::ToolUse(LanguageModelToolUse {
- id: LanguageModelToolUseId::from(tool_id),
- name: Arc::from(function.name),
- raw_input: function.arguments.to_string(),
- input: function.arguments,
- is_input_complete: true,
- });
- events.push(Ok(event));
- state.used_tools = true;
- }
- }
+ let OllamaToolCall { id, function } = tool_call;
+ let id = id.unwrap_or_else(|| {
+ format!(
+ "{}-{}",
+ &function.name,
+ TOOL_CALL_COUNTER.fetch_add(1, Ordering::Relaxed)
+ )
+ });
+ let event = LanguageModelCompletionEvent::ToolUse(LanguageModelToolUse {
+ id: LanguageModelToolUseId::from(id),
+ name: Arc::from(function.name),
+ raw_input: function.arguments.to_string(),
+ input: function.arguments,
+ is_input_complete: true,
+ });
+ events.push(Ok(event));
+ state.used_tools = true;
} else if !content.is_empty() {
events.push(Ok(LanguageModelCompletionEvent::Text(content)));
}
@@ -1053,11 +1053,16 @@ impl Render for LspButton {
(None, "All Servers Operational")
};
- let lsp_button = cx.entity();
+ let lsp_button = cx.weak_entity();
div().child(
PopoverMenu::new("lsp-tool")
- .menu(move |_, cx| lsp_button.read(cx).lsp_menu.clone())
+ .menu(move |_, cx| {
+ lsp_button
+ .read_with(cx, |lsp_button, _| lsp_button.lsp_menu.clone())
+ .ok()
+ .flatten()
+ })
.anchor(Corner::BottomLeft)
.with_handle(self.popover_menu_handle.clone())
.trigger_with_tooltip(
@@ -163,6 +163,45 @@ impl LspAdapter for TyLspAdapter {
Self::SERVER_NAME
}
+ async fn label_for_completion(
+ &self,
+ item: &lsp::CompletionItem,
+ language: &Arc<language::Language>,
+ ) -> Option<language::CodeLabel> {
+ let label = &item.label;
+ let label_len = label.len();
+ let grammar = language.grammar()?;
+ let highlight_id = match item.kind? {
+ lsp::CompletionItemKind::METHOD => grammar.highlight_id_for_name("function.method"),
+ lsp::CompletionItemKind::FUNCTION => grammar.highlight_id_for_name("function"),
+ lsp::CompletionItemKind::CLASS => grammar.highlight_id_for_name("type"),
+ lsp::CompletionItemKind::CONSTANT => grammar.highlight_id_for_name("constant"),
+ lsp::CompletionItemKind::VARIABLE => grammar.highlight_id_for_name("variable"),
+ _ => {
+ return None;
+ }
+ };
+
+ let mut text = label.clone();
+ if let Some(completion_details) = item
+ .label_details
+ .as_ref()
+ .and_then(|details| details.detail.as_ref())
+ {
+ write!(&mut text, " {}", completion_details).ok();
+ }
+
+ Some(language::CodeLabel::filtered(
+ text,
+ label_len,
+ item.filter_text.as_deref(),
+ highlight_id
+ .map(|id| (0..label_len, id))
+ .into_iter()
+ .collect(),
+ ))
+ }
+
async fn workspace_configuration(
self: Arc<Self>,
delegate: &Arc<dyn LspAdapterDelegate>,
@@ -195,7 +195,9 @@
(unary_expression "!" @operator)
operator: "/" @operator
-(lifetime) @lifetime
+(lifetime
+ "'" @lifetime
+ (identifier) @lifetime)
(parameter (identifier) @variable.parameter)
@@ -102,9 +102,11 @@ pub enum ChatMessage {
}
#[derive(Serialize, Deserialize, Debug)]
-#[serde(rename_all = "lowercase")]
-pub enum OllamaToolCall {
- Function(OllamaFunctionCall),
+pub struct OllamaToolCall {
+ // TODO: Remove `Option` after most users have updated to Ollama v0.12.10,
+ // which was released on the 4th of November 2025
+ pub id: Option<String>,
+ pub function: OllamaFunctionCall,
}
#[derive(Serialize, Deserialize, Debug)]
@@ -444,6 +446,7 @@ mod tests {
"content": "",
"tool_calls": [
{
+ "id": "call_llama3.2:3b_145155",
"function": {
"name": "weather",
"arguments": {
@@ -479,6 +482,56 @@ mod tests {
}
}
+ // Backwards compatibility with Ollama versions prior to v0.12.10 November 2025
+ // This test is a copy of `parse_tool_call()` with the `id` field omitted.
+ #[test]
+ fn parse_tool_call_pre_0_12_10() {
+ let response = serde_json::json!({
+ "model": "llama3.2:3b",
+ "created_at": "2025-04-28T20:02:02.140489Z",
+ "message": {
+ "role": "assistant",
+ "content": "",
+ "tool_calls": [
+ {
+ "function": {
+ "name": "weather",
+ "arguments": {
+ "city": "london",
+ }
+ }
+ }
+ ]
+ },
+ "done_reason": "stop",
+ "done": true,
+ "total_duration": 2758629166u64,
+ "load_duration": 1770059875,
+ "prompt_eval_count": 147,
+ "prompt_eval_duration": 684637583,
+ "eval_count": 16,
+ "eval_duration": 302561917,
+ });
+
+ let result: ChatResponseDelta = serde_json::from_value(response).unwrap();
+ match result.message {
+ ChatMessage::Assistant {
+ content,
+ tool_calls: Some(tool_calls),
+ images: _,
+ thinking,
+ } => {
+ assert!(content.is_empty());
+ assert!(thinking.is_none());
+
+ // When the `Option` around `id` is removed, this test should complain
+ // and be subsequently deleted in favor of `parse_tool_call()`
+ assert!(tool_calls.first().is_some_and(|call| call.id.is_none()))
+ }
+ _ => panic!("Deserialized wrong role"),
+ }
+ }
+
#[test]
fn parse_show_model() {
let response = serde_json::json!({
@@ -5,7 +5,7 @@ use fs::Fs;
use gpui::{Action, App, IntoElement};
use settings::{BaseKeymap, Settings, update_settings_file};
use theme::{
- Appearance, SystemAppearance, ThemeMode, ThemeName, ThemeRegistry, ThemeSelection,
+ Appearance, SystemAppearance, ThemeAppearanceMode, ThemeName, ThemeRegistry, ThemeSelection,
ThemeSettings,
};
use ui::{
@@ -44,8 +44,8 @@ fn render_theme_section(tab_index: &mut isize, cx: &mut App) -> impl IntoElement
let theme_mode = theme_selection
.mode()
.unwrap_or_else(|| match *system_appearance {
- Appearance::Light => ThemeMode::Light,
- Appearance::Dark => ThemeMode::Dark,
+ Appearance::Light => ThemeAppearanceMode::Light,
+ Appearance::Dark => ThemeAppearanceMode::Dark,
});
return v_flex()
@@ -54,7 +54,12 @@ fn render_theme_section(tab_index: &mut isize, cx: &mut App) -> impl IntoElement
h_flex().justify_between().child(Label::new("Theme")).child(
ToggleButtonGroup::single_row(
"theme-selector-onboarding-dark-light",
- [ThemeMode::Light, ThemeMode::Dark, ThemeMode::System].map(|mode| {
+ [
+ ThemeAppearanceMode::Light,
+ ThemeAppearanceMode::Dark,
+ ThemeAppearanceMode::System,
+ ]
+ .map(|mode| {
const MODE_NAMES: [SharedString; 3] = [
SharedString::new_static("Light"),
SharedString::new_static("Dark"),
@@ -100,13 +105,13 @@ fn render_theme_section(tab_index: &mut isize, cx: &mut App) -> impl IntoElement
let theme_mode = theme_selection
.mode()
.unwrap_or_else(|| match *system_appearance {
- Appearance::Light => ThemeMode::Light,
- Appearance::Dark => ThemeMode::Dark,
+ Appearance::Light => ThemeAppearanceMode::Light,
+ Appearance::Dark => ThemeAppearanceMode::Dark,
});
let appearance = match theme_mode {
- ThemeMode::Light => Appearance::Light,
- ThemeMode::Dark => Appearance::Dark,
- ThemeMode::System => *system_appearance,
+ ThemeAppearanceMode::Light => Appearance::Light,
+ ThemeAppearanceMode::Dark => Appearance::Dark,
+ ThemeAppearanceMode::System => *system_appearance,
};
let current_theme_name: SharedString = theme_selection.name(appearance).0.into();
@@ -164,7 +169,7 @@ fn render_theme_section(tab_index: &mut isize, cx: &mut App) -> impl IntoElement
}
})
.map(|this| {
- if theme_mode == ThemeMode::System {
+ if theme_mode == ThemeAppearanceMode::System {
let (light, dark) = (
theme_registry.get(LIGHT_THEMES[index]).unwrap(),
theme_registry.get(DARK_THEMES[index]).unwrap(),
@@ -189,23 +194,27 @@ fn render_theme_section(tab_index: &mut isize, cx: &mut App) -> impl IntoElement
})
}
- fn write_mode_change(mode: ThemeMode, cx: &mut App) {
+ fn write_mode_change(mode: ThemeAppearanceMode, cx: &mut App) {
let fs = <dyn Fs>::global(cx);
update_settings_file(fs, cx, move |settings, _cx| {
theme::set_mode(settings, mode);
});
}
- fn write_theme_change(theme: impl Into<Arc<str>>, theme_mode: ThemeMode, cx: &mut App) {
+ fn write_theme_change(
+ theme: impl Into<Arc<str>>,
+ theme_mode: ThemeAppearanceMode,
+ cx: &mut App,
+ ) {
let fs = <dyn Fs>::global(cx);
let theme = theme.into();
update_settings_file(fs, cx, move |settings, cx| {
- if theme_mode == ThemeMode::System {
+ if theme_mode == ThemeAppearanceMode::System {
let (light_theme, dark_theme) =
get_theme_family_themes(&theme).unwrap_or((theme.as_ref(), theme.as_ref()));
settings.theme.theme = Some(settings::ThemeSelection::Dynamic {
- mode: ThemeMode::System,
+ mode: ThemeAppearanceMode::System,
light: ThemeName(light_theme.into()),
dark: ThemeName(dark_theme.into()),
});
@@ -6619,13 +6619,11 @@ outline: struct OutlineEntryExcerpt
format!(
r#"frontend-project/
public/lottie/
- syntax-tree.json
- search: {{ "something": "Β«staticΒ»" }}
+ syntax-tree.json <==== selected
src/
app/(site)/
components/
- ErrorBoundary.tsx <==== selected
- search: Β«staticΒ»"#
+ ErrorBoundary.tsx"#
)
);
});
@@ -6667,7 +6665,7 @@ outline: struct OutlineEntryExcerpt
format!(
r#"frontend-project/
public/lottie/
- syntax-tree.json
+ syntax-tree.json <==== selected
search: {{ "something": "Β«staticΒ»" }}
src/
app/(site)/
@@ -6678,7 +6676,7 @@ outline: struct OutlineEntryExcerpt
page.tsx
search: Β«staticΒ»
components/
- ErrorBoundary.tsx <==== selected
+ ErrorBoundary.tsx
search: Β«staticΒ»"#
)
);
@@ -759,6 +759,18 @@ impl AgentServerStore {
}
})
}
+
+ pub fn get_extension_id_for_agent(
+ &mut self,
+ name: &ExternalAgentServerName,
+ ) -> Option<Arc<str>> {
+ self.external_agents.get_mut(name).and_then(|agent| {
+ agent
+ .as_any_mut()
+ .downcast_ref::<LocalExtensionArchiveAgent>()
+ .map(|ext_agent| ext_agent.extension_id.clone())
+ })
+ }
}
fn get_or_npm_install_builtin_agent(
@@ -261,7 +261,10 @@ impl DapStore {
.get(&adapter.name());
let user_installed_path = dap_settings.and_then(|s| match &s.binary {
DapBinary::Default => None,
- DapBinary::Custom(binary) => Some(PathBuf::from(binary)),
+ DapBinary::Custom(binary) => {
+ // if `binary` is absolute, `.join()` will keep it unmodified
+ Some(worktree.read(cx).abs_path().join(PathBuf::from(binary)))
+ }
});
let user_args = dap_settings.map(|s| s.args.clone());
let user_env = dap_settings.map(|s| s.env.clone());
@@ -55,9 +55,10 @@ use rpc::{
proto::{self, git_reset, split_repository_update},
};
use serde::Deserialize;
+use settings::WorktreeId;
use std::{
cmp::Ordering,
- collections::{BTreeSet, VecDeque},
+ collections::{BTreeSet, HashSet, VecDeque},
future::Future,
mem,
ops::Range,
@@ -89,6 +90,7 @@ pub struct GitStore {
buffer_store: Entity<BufferStore>,
worktree_store: Entity<WorktreeStore>,
repositories: HashMap<RepositoryId, Entity<Repository>>,
+ worktree_ids: HashMap<RepositoryId, HashSet<WorktreeId>>,
active_repo_id: Option<RepositoryId>,
#[allow(clippy::type_complexity)]
loading_diffs:
@@ -225,7 +227,7 @@ impl sum_tree::Item for StatusEntry {
fn summary(&self, _: <Self::Summary as sum_tree::Summary>::Context<'_>) -> Self::Summary {
PathSummary {
- max_path: self.repo_path.0.clone(),
+ max_path: self.repo_path.as_ref().clone(),
item_summary: self.status.summary(),
}
}
@@ -235,7 +237,7 @@ impl sum_tree::KeyedItem for StatusEntry {
type Key = PathKey;
fn key(&self) -> Self::Key {
- PathKey(self.repo_path.0.clone())
+ PathKey(self.repo_path.as_ref().clone())
}
}
@@ -409,6 +411,7 @@ impl GitStore {
buffer_store,
worktree_store,
repositories: HashMap::default(),
+ worktree_ids: HashMap::default(),
active_repo_id: None,
_subscriptions,
loading_diffs: HashMap::default(),
@@ -987,7 +990,7 @@ impl GitStore {
RepositoryState::Local { backend, .. } => backend
.blame(repo_path.clone(), content)
.await
- .with_context(|| format!("Failed to blame {:?}", repo_path.0))
+ .with_context(|| format!("Failed to blame {:?}", repo_path.as_ref()))
.map(Some),
RepositoryState::Remote { project_id, client } => {
let response = client
@@ -1167,6 +1170,7 @@ impl GitStore {
return;
}
self.update_repositories_from_worktree(
+ *worktree_id,
project_environment.clone(),
next_repository_id.clone(),
downstream
@@ -1178,6 +1182,45 @@ impl GitStore {
);
self.local_worktree_git_repos_changed(worktree, changed_repos, cx);
}
+ WorktreeStoreEvent::WorktreeRemoved(_entity_id, worktree_id) => {
+ let repos_without_worktree: Vec<RepositoryId> = self
+ .worktree_ids
+ .iter_mut()
+ .filter_map(|(repo_id, worktree_ids)| {
+ worktree_ids.remove(worktree_id);
+ if worktree_ids.is_empty() {
+ Some(*repo_id)
+ } else {
+ None
+ }
+ })
+ .collect();
+ let is_active_repo_removed = repos_without_worktree
+ .iter()
+ .any(|repo_id| self.active_repo_id == Some(*repo_id));
+
+ for repo_id in repos_without_worktree {
+ self.repositories.remove(&repo_id);
+ self.worktree_ids.remove(&repo_id);
+ if let Some(updates_tx) =
+ downstream.as_ref().map(|downstream| &downstream.updates_tx)
+ {
+ updates_tx
+ .unbounded_send(DownstreamUpdate::RemoveRepository(repo_id))
+ .ok();
+ }
+ }
+
+ if is_active_repo_removed {
+ if let Some((&repo_id, _)) = self.repositories.iter().next() {
+ self.active_repo_id = Some(repo_id);
+ cx.emit(GitStoreEvent::ActiveRepositoryChanged(Some(repo_id)));
+ } else {
+ self.active_repo_id = None;
+ cx.emit(GitStoreEvent::ActiveRepositoryChanged(None));
+ }
+ }
+ }
_ => {}
}
}
@@ -1228,6 +1271,7 @@ impl GitStore {
/// Update our list of repositories and schedule git scans in response to a notification from a worktree,
fn update_repositories_from_worktree(
&mut self,
+ worktree_id: WorktreeId,
project_environment: Entity<ProjectEnvironment>,
next_repository_id: Arc<AtomicU64>,
updates_tx: Option<mpsc::UnboundedSender<DownstreamUpdate>>,
@@ -1245,15 +1289,25 @@ impl GitStore {
|| Some(&existing_work_directory_abs_path)
== update.new_work_directory_abs_path.as_ref()
}) {
+ let repo_id = *id;
if let Some(new_work_directory_abs_path) =
update.new_work_directory_abs_path.clone()
{
+ self.worktree_ids
+ .entry(repo_id)
+ .or_insert_with(HashSet::new)
+ .insert(worktree_id);
existing.update(cx, |existing, cx| {
existing.snapshot.work_directory_abs_path = new_work_directory_abs_path;
existing.schedule_scan(updates_tx.clone(), cx);
});
} else {
- removed_ids.push(*id);
+ if let Some(worktree_ids) = self.worktree_ids.get_mut(&repo_id) {
+ worktree_ids.remove(&worktree_id);
+ if worktree_ids.is_empty() {
+ removed_ids.push(repo_id);
+ }
+ }
}
} else if let UpdatedGitRepository {
new_work_directory_abs_path: Some(work_directory_abs_path),
@@ -1291,6 +1345,7 @@ impl GitStore {
self._subscriptions
.push(cx.subscribe(&repo, Self::on_jobs_updated));
self.repositories.insert(id, repo);
+ self.worktree_ids.insert(id, HashSet::from([worktree_id]));
cx.emit(GitStoreEvent::RepositoryAdded);
self.active_repo_id.get_or_insert_with(|| {
cx.emit(GitStoreEvent::ActiveRepositoryChanged(Some(id)));
@@ -1344,7 +1399,44 @@ impl GitStore {
diffs.remove(buffer_id);
}
}
+ BufferStoreEvent::BufferChangedFilePath { buffer, .. } => {
+ // Whenever a buffer's file path changes, it's possible that the
+ // new path is actually a path that is being tracked by a git
+ // repository. In that case, we'll want to update the buffer's
+ // `BufferDiffState`, in case it already has one.
+ let buffer_id = buffer.read(cx).remote_id();
+ let diff_state = self.diffs.get(&buffer_id);
+ let repo = self.repository_and_path_for_buffer_id(buffer_id, cx);
+
+ if let Some(diff_state) = diff_state
+ && let Some((repo, repo_path)) = repo
+ {
+ let buffer = buffer.clone();
+ let diff_state = diff_state.clone();
+ cx.spawn(async move |_git_store, cx| {
+ async {
+ let diff_bases_change = repo
+ .update(cx, |repo, cx| {
+ repo.load_committed_text(buffer_id, repo_path, cx)
+ })?
+ .await?;
+
+ diff_state.update(cx, |diff_state, cx| {
+ let buffer_snapshot = buffer.read(cx).text_snapshot();
+ diff_state.diff_bases_changed(
+ buffer_snapshot,
+ Some(diff_bases_change),
+ cx,
+ );
+ })
+ }
+ .await
+ .log_err();
+ })
+ .detach();
+ }
+ }
_ => {}
}
}
@@ -1902,6 +1994,15 @@ impl GitStore {
) -> Result<proto::Ack> {
let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
+ let askpass_id = envelope.payload.askpass_id;
+
+ let askpass = make_remote_delegate(
+ this,
+ envelope.payload.project_id,
+ repository_id,
+ askpass_id,
+ &mut cx,
+ );
let message = SharedString::from(envelope.payload.message);
let name = envelope.payload.name.map(SharedString::from);
@@ -1917,6 +2018,7 @@ impl GitStore {
amend: options.amend,
signoff: options.signoff,
},
+ askpass,
cx,
)
})?
@@ -2039,7 +2141,7 @@ impl GitStore {
repository_handle
.update(&mut cx, |repository_handle, _| {
- repository_handle.create_branch(branch_name)
+ repository_handle.create_branch(branch_name, None)
})?
.await??;
@@ -2311,7 +2413,7 @@ impl GitStore {
.entries
.into_iter()
.map(|(path, status)| proto::TreeDiffStatus {
- path: path.0.to_proto(),
+ path: path.as_ref().to_proto(),
status: match status {
TreeDiffStatus::Added {} => proto::tree_diff_status::Status::Added.into(),
TreeDiffStatus::Modified { .. } => {
@@ -3087,13 +3189,13 @@ impl RepositorySnapshot {
pub fn status_for_path(&self, path: &RepoPath) -> Option<StatusEntry> {
self.statuses_by_path
- .get(&PathKey(path.0.clone()), ())
+ .get(&PathKey(path.as_ref().clone()), ())
.cloned()
}
pub fn pending_ops_for_path(&self, path: &RepoPath) -> Option<PendingOps> {
self.pending_ops_by_path
- .get(&PathKey(path.0.clone()), ())
+ .get(&PathKey(path.as_ref().clone()), ())
.cloned()
}
@@ -4161,9 +4263,12 @@ impl Repository {
message: SharedString,
name_and_email: Option<(SharedString, SharedString)>,
options: CommitOptions,
+ askpass: AskPassDelegate,
_cx: &mut App,
) -> oneshot::Receiver<Result<()>> {
let id = self.id;
+ let askpass_delegates = self.askpass_delegates.clone();
+ let askpass_id = util::post_inc(&mut self.latest_askpass_id);
self.send_job(Some("git commit".into()), move |git_repo, _cx| async move {
match git_repo {
@@ -4173,10 +4278,15 @@ impl Repository {
..
} => {
backend
- .commit(message, name_and_email, options, environment)
+ .commit(message, name_and_email, options, askpass, environment)
.await
}
RepositoryState::Remote { project_id, client } => {
+ askpass_delegates.lock().insert(askpass_id, askpass);
+ let _defer = util::defer(|| {
+ let askpass_delegate = askpass_delegates.lock().remove(&askpass_id);
+ debug_assert!(askpass_delegate.is_some());
+ });
let (name, email) = name_and_email.unzip();
client
.request(proto::Commit {
@@ -4189,6 +4299,7 @@ impl Repository {
amend: options.amend,
signoff: options.signoff,
}),
+ askpass_id,
})
.await
.context("sending commit request")?;
@@ -4653,7 +4764,9 @@ impl Repository {
}
};
Some((
- RepoPath(RelPath::from_proto(&entry.path).log_err()?),
+ RepoPath::from_rel_path(
+ &RelPath::from_proto(&entry.path).log_err()?,
+ ),
status,
))
})
@@ -4692,29 +4805,35 @@ impl Repository {
})
}
- pub fn create_branch(&mut self, branch_name: String) -> oneshot::Receiver<Result<()>> {
+ pub fn create_branch(
+ &mut self,
+ branch_name: String,
+ base_branch: Option<String>,
+ ) -> oneshot::Receiver<Result<()>> {
let id = self.id;
- self.send_job(
- Some(format!("git switch -c {branch_name}").into()),
- move |repo, _cx| async move {
- match repo {
- RepositoryState::Local { backend, .. } => {
- backend.create_branch(branch_name).await
- }
- RepositoryState::Remote { project_id, client } => {
- client
- .request(proto::GitCreateBranch {
- project_id: project_id.0,
- repository_id: id.to_proto(),
- branch_name,
- })
- .await?;
+ let status_msg = if let Some(ref base) = base_branch {
+ format!("git switch -c {branch_name} {base}").into()
+ } else {
+ format!("git switch -c {branch_name}").into()
+ };
+ self.send_job(Some(status_msg), move |repo, _cx| async move {
+ match repo {
+ RepositoryState::Local { backend, .. } => {
+ backend.create_branch(branch_name, base_branch).await
+ }
+ RepositoryState::Remote { project_id, client } => {
+ client
+ .request(proto::GitCreateBranch {
+ project_id: project_id.0,
+ repository_id: id.to_proto(),
+ branch_name,
+ })
+ .await?;
- Ok(())
- }
+ Ok(())
}
- },
- )
+ }
+ })
}
pub fn change_branch(&mut self, branch_name: String) -> oneshot::Receiver<Result<()>> {
@@ -5209,7 +5328,8 @@ impl Repository {
let mut cursor = prev_statuses.cursor::<PathProgress>(());
for path in changed_paths.into_iter() {
if cursor.seek_forward(&PathTarget::Path(&path), Bias::Left) {
- changed_path_statuses.push(Edit::Remove(PathKey(path.0)));
+ changed_path_statuses
+ .push(Edit::Remove(PathKey(path.as_ref().clone())));
}
}
changed_path_statuses
@@ -5355,10 +5475,8 @@ fn get_permalink_in_rust_registry_src(
remote,
BuildPermalinkParams::new(
&cargo_vcs_info.git.sha1,
- &RepoPath(
- RelPath::new(&path, PathStyle::local())
- .context("invalid path")?
- .into_arc(),
+ &RepoPath::from_rel_path(
+ &RelPath::new(&path, PathStyle::local()).context("invalid path")?,
),
Some(selection),
),
@@ -5560,7 +5678,11 @@ async fn compute_snapshot(
let mut events = Vec::new();
let branches = backend.branches().await?;
let branch = branches.into_iter().find(|branch| branch.is_head);
- let statuses = backend.status(&[RelPath::empty().into()]).await?;
+ let statuses = backend
+ .status(&[RepoPath::from_rel_path(
+ &RelPath::new(".".as_ref(), PathStyle::local()).unwrap(),
+ )])
+ .await?;
let stash_entries = backend.stash_entries().await?;
let statuses_by_path = SumTree::from_iter(
statuses
@@ -264,7 +264,7 @@ mod tests {
use super::*;
use fs::FakeFs;
use git::{
- repository::repo_path,
+ repository::{RepoPath, repo_path},
status::{UnmergedStatus, UnmergedStatusCode},
};
use gpui::{BackgroundExecutor, TestAppContext};
@@ -617,7 +617,7 @@ mod tests {
cx.run_until_parked();
fs.with_git_state(path!("/project/.git").as_ref(), true, |state| {
state.unmerged_paths.insert(
- rel_path("a.txt").into(),
+ RepoPath::from_rel_path(rel_path("a.txt")),
UnmergedStatus {
first_head: UnmergedStatusCode::Updated,
second_head: UnmergedStatusCode::Updated,
@@ -46,7 +46,7 @@ impl Item for PendingOps {
fn summary(&self, _cx: ()) -> Self::Summary {
PathSummary {
- max_path: self.repo_path.0.clone(),
+ max_path: self.repo_path.as_ref().clone(),
item_summary: PendingOpsSummary {
staged_count: self.staged() as usize,
staging_count: self.staging() as usize,
@@ -73,7 +73,7 @@ impl KeyedItem for PendingOps {
type Key = PathKey;
fn key(&self) -> Self::Key {
- PathKey(self.repo_path.0.clone())
+ PathKey(self.repo_path.as_ref().clone())
}
}
@@ -563,8 +563,8 @@ impl LocalLspStore {
allow_binary_download: bool,
cx: &mut App,
) -> Task<Result<LanguageServerBinary>> {
- if let Some(settings) = settings.binary.as_ref()
- && settings.path.is_some()
+ if let Some(settings) = &settings.binary
+ && let Some(path) = settings.path.as_ref().map(PathBuf::from)
{
let settings = settings.clone();
@@ -573,7 +573,8 @@ impl LocalLspStore {
env.extend(settings.env.unwrap_or_default());
Ok(LanguageServerBinary {
- path: PathBuf::from(&settings.path.unwrap()),
+ // if `path` is absolute, `.join()` will keep it unmodified
+ path: delegate.worktree_root_path().join(path),
env: Some(env),
arguments: settings
.arguments
@@ -7643,14 +7644,16 @@ impl LspStore {
let buffer = buffer.read(cx);
let file = File::from_dyn(buffer.file())?;
let abs_path = file.as_local()?.abs_path(cx);
- let uri = lsp::Uri::from_file_path(abs_path).unwrap();
+ let uri = lsp::Uri::from_file_path(&abs_path)
+ .ok()
+ .with_context(|| format!("Failed to convert path to URI: {}", abs_path.display()))
+ .log_err()?;
let next_snapshot = buffer.text_snapshot();
for language_server in language_servers {
let language_server = language_server.clone();
let buffer_snapshots = self
- .as_local_mut()
- .unwrap()
+ .as_local_mut()?
.buffer_snapshots
.get_mut(&buffer.remote_id())
.and_then(|m| m.get_mut(&language_server.server_id()))?;
@@ -1208,6 +1208,73 @@ async fn test_managing_language_servers(cx: &mut gpui::TestAppContext) {
);
}
+#[gpui::test]
+async fn test_language_server_relative_path(cx: &mut gpui::TestAppContext) {
+ init_test(cx);
+
+ let settings_json_contents = json!({
+ "languages": {
+ "Rust": {
+ "language_servers": ["my_fake_lsp"]
+ }
+ },
+ "lsp": {
+ "my_fake_lsp": {
+ "binary": {
+ "path": path!("relative_path/to/my_fake_lsp_binary.exe").to_string(),
+ }
+ }
+ },
+ });
+
+ let fs = FakeFs::new(cx.executor());
+ fs.insert_tree(
+ path!("/the-root"),
+ json!({
+ ".zed": {
+ "settings.json": settings_json_contents.to_string(),
+ },
+ "relative_path": {
+ "to": {
+ "my_fake_lsp.exe": "",
+ },
+ },
+ "src": {
+ "main.rs": "",
+ }
+ }),
+ )
+ .await;
+
+ let project = Project::test(fs.clone(), [path!("/the-root").as_ref()], cx).await;
+ let language_registry = project.read_with(cx, |project, _| project.languages().clone());
+ language_registry.add(rust_lang());
+
+ let mut fake_rust_servers = language_registry.register_fake_lsp(
+ "Rust",
+ FakeLspAdapter {
+ name: "my_fake_lsp",
+ ..Default::default()
+ },
+ );
+
+ cx.run_until_parked();
+
+ // Start the language server by opening a buffer with a compatible file extension.
+ project
+ .update(cx, |project, cx| {
+ project.open_local_buffer_with_lsp(path!("/the-root/src/main.rs"), cx)
+ })
+ .await
+ .unwrap();
+
+ let lsp_path = fake_rust_servers.next().await.unwrap().binary.path;
+ assert_eq!(
+ lsp_path.to_string_lossy(),
+ path!("/the-root/relative_path/to/my_fake_lsp_binary.exe"),
+ );
+}
+
#[gpui::test]
async fn test_reporting_fs_changes_to_language_servers(cx: &mut gpui::TestAppContext) {
init_test(cx);
@@ -7937,7 +8004,7 @@ async fn test_staging_random_hunks(
log::info!(
"index text:\n{}",
- repo.load_index_text(rel_path("file.txt").into())
+ repo.load_index_text(RepoPath::from_rel_path(rel_path("file.txt")))
.await
.unwrap()
);
@@ -8523,7 +8590,7 @@ async fn test_repository_pending_ops_staging(
assert_eq!(
pending_ops_all
.lock()
- .get(&worktree::PathKey(repo_path("a.txt").0), ())
+ .get(&worktree::PathKey(repo_path("a.txt").as_ref().clone()), ())
.unwrap()
.ops,
vec![
@@ -8644,7 +8711,7 @@ async fn test_repository_pending_ops_long_running_staging(
assert_eq!(
pending_ops_all
.lock()
- .get(&worktree::PathKey(repo_path("a.txt").0), ())
+ .get(&worktree::PathKey(repo_path("a.txt").as_ref().clone()), ())
.unwrap()
.ops,
vec![
@@ -8752,7 +8819,7 @@ async fn test_repository_pending_ops_stage_all(
assert_eq!(
pending_ops_all
.lock()
- .get(&worktree::PathKey(repo_path("a.txt").0), ())
+ .get(&worktree::PathKey(repo_path("a.txt").as_ref().clone()), ())
.unwrap()
.ops,
vec![
@@ -8771,7 +8838,7 @@ async fn test_repository_pending_ops_stage_all(
assert_eq!(
pending_ops_all
.lock()
- .get(&worktree::PathKey(repo_path("b.txt").0), ())
+ .get(&worktree::PathKey(repo_path("b.txt").as_ref().clone()), ())
.unwrap()
.ops,
vec![
@@ -9309,11 +9376,9 @@ async fn test_file_status(cx: &mut gpui::TestAppContext) {
repository.read_with(cx, |repository, _cx| {
assert_eq!(
repository
- .status_for_path(
- &rel_path(renamed_dir_name)
- .join(rel_path(RENAMED_FILE))
- .into()
- )
+ .status_for_path(&RepoPath::from_rel_path(
+ &rel_path(renamed_dir_name).join(rel_path(RENAMED_FILE))
+ ))
.unwrap()
.status,
FileStatus::Untracked,
@@ -9337,11 +9402,9 @@ async fn test_file_status(cx: &mut gpui::TestAppContext) {
repository.read_with(cx, |repository, _cx| {
assert_eq!(
repository
- .status_for_path(
- &rel_path(renamed_dir_name)
- .join(rel_path(RENAMED_FILE))
- .into()
- )
+ .status_for_path(&RepoPath::from_rel_path(
+ &rel_path(renamed_dir_name).join(rel_path(RENAMED_FILE))
+ ))
.unwrap()
.status,
FileStatus::Untracked,
@@ -10049,6 +10112,120 @@ async fn test_repository_deduplication(cx: &mut gpui::TestAppContext) {
pretty_assertions::assert_eq!(repos, [Path::new(path!("/root/project")).into()]);
}
+#[gpui::test]
+async fn test_buffer_changed_file_path_updates_git_diff(cx: &mut gpui::TestAppContext) {
+ init_test(cx);
+
+ let file_1_committed = String::from(r#"file_1_committed"#);
+ let file_1_staged = String::from(r#"file_1_staged"#);
+ let file_2_committed = String::from(r#"file_2_committed"#);
+ let file_2_staged = String::from(r#"file_2_staged"#);
+ let buffer_contents = String::from(r#"buffer"#);
+
+ let fs = FakeFs::new(cx.background_executor.clone());
+ fs.insert_tree(
+ path!("/dir"),
+ json!({
+ ".git": {},
+ "src": {
+ "file_1.rs": file_1_committed.clone(),
+ "file_2.rs": file_2_committed.clone(),
+ }
+ }),
+ )
+ .await;
+
+ fs.set_head_for_repo(
+ path!("/dir/.git").as_ref(),
+ &[
+ ("src/file_1.rs", file_1_committed.clone()),
+ ("src/file_2.rs", file_2_committed.clone()),
+ ],
+ "deadbeef",
+ );
+ fs.set_index_for_repo(
+ path!("/dir/.git").as_ref(),
+ &[
+ ("src/file_1.rs", file_1_staged.clone()),
+ ("src/file_2.rs", file_2_staged.clone()),
+ ],
+ );
+
+ let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
+
+ let buffer = project
+ .update(cx, |project, cx| {
+ project.open_local_buffer(path!("/dir/src/file_1.rs"), cx)
+ })
+ .await
+ .unwrap();
+
+ buffer.update(cx, |buffer, cx| {
+ buffer.edit([(0..buffer.len(), buffer_contents.as_str())], None, cx);
+ });
+
+ let unstaged_diff = project
+ .update(cx, |project, cx| {
+ project.open_unstaged_diff(buffer.clone(), cx)
+ })
+ .await
+ .unwrap();
+
+ cx.run_until_parked();
+
+ unstaged_diff.update(cx, |unstaged_diff, _cx| {
+ let base_text = unstaged_diff.base_text_string().unwrap();
+ assert_eq!(base_text, file_1_staged, "Should start with file_1 staged");
+ });
+
+ // Save the buffer as `file_2.rs`, which should trigger the
+ // `BufferChangedFilePath` event.
+ project
+ .update(cx, |project, cx| {
+ let worktree_id = project.worktrees(cx).next().unwrap().read(cx).id();
+ let path = ProjectPath {
+ worktree_id,
+ path: rel_path("src/file_2.rs").into(),
+ };
+ project.save_buffer_as(buffer.clone(), path, cx)
+ })
+ .await
+ .unwrap();
+
+ cx.run_until_parked();
+
+ // Verify that the diff bases have been updated to file_2's contents due to
+ // the `BufferChangedFilePath` event being handled.
+ unstaged_diff.update(cx, |unstaged_diff, cx| {
+ let snapshot = buffer.read(cx).snapshot();
+ let base_text = unstaged_diff.base_text_string().unwrap();
+ assert_eq!(
+ base_text, file_2_staged,
+ "Diff bases should be automatically updated to file_2 staged content"
+ );
+
+ let hunks: Vec<_> = unstaged_diff.hunks(&snapshot, cx).collect();
+ assert!(!hunks.is_empty(), "Should have diff hunks for file_2");
+ });
+
+ let uncommitted_diff = project
+ .update(cx, |project, cx| {
+ project.open_uncommitted_diff(buffer.clone(), cx)
+ })
+ .await
+ .unwrap();
+
+ cx.run_until_parked();
+
+ uncommitted_diff.update(cx, |uncommitted_diff, _cx| {
+ let base_text = uncommitted_diff.base_text_string().unwrap();
+ assert_eq!(
+ base_text, file_2_committed,
+ "Uncommitted diff should compare against file_2 committed content"
+ );
+ });
+}
+
async fn search(
project: &Entity<Project>,
query: SearchQuery,
@@ -10477,3 +10654,116 @@ async fn test_find_project_path_abs(
);
});
}
+
+#[gpui::test]
+async fn test_git_worktree_remove(cx: &mut gpui::TestAppContext) {
+ init_test(cx);
+
+ let fs = FakeFs::new(cx.executor());
+ fs.insert_tree(
+ path!("/root"),
+ json!({
+ "a": {
+ ".git": {},
+ "src": {
+ "main.rs": "fn main() {}",
+ }
+ },
+ "b": {
+ ".git": {},
+ "src": {
+ "main.rs": "fn main() {}",
+ },
+ "script": {
+ "run.sh": "#!/bin/bash"
+ }
+ }
+ }),
+ )
+ .await;
+
+ let project = Project::test(
+ fs.clone(),
+ [
+ path!("/root/a").as_ref(),
+ path!("/root/b/script").as_ref(),
+ path!("/root/b").as_ref(),
+ ],
+ cx,
+ )
+ .await;
+ let scan_complete = project.update(cx, |project, cx| project.git_scans_complete(cx));
+ scan_complete.await;
+
+ let worktrees = project.update(cx, |project, cx| project.worktrees(cx).collect::<Vec<_>>());
+ assert_eq!(worktrees.len(), 3);
+
+ let worktree_id_by_abs_path = worktrees
+ .into_iter()
+ .map(|worktree| worktree.read_with(cx, |w, _| (w.abs_path(), w.id())))
+ .collect::<HashMap<_, _>>();
+ let worktree_id = worktree_id_by_abs_path
+ .get(Path::new(path!("/root/b/script")))
+ .unwrap();
+
+ let repos = project.update(cx, |p, cx| p.git_store().read(cx).repositories().clone());
+ assert_eq!(repos.len(), 2);
+
+ project.update(cx, |project, cx| {
+ project.remove_worktree(*worktree_id, cx);
+ });
+ cx.run_until_parked();
+
+ let mut repo_paths = project
+ .update(cx, |p, cx| p.git_store().read(cx).repositories().clone())
+ .values()
+ .map(|repo| repo.read_with(cx, |r, _| r.work_directory_abs_path.clone()))
+ .collect::<Vec<_>>();
+ repo_paths.sort();
+
+ pretty_assertions::assert_eq!(
+ repo_paths,
+ [
+ Path::new(path!("/root/a")).into(),
+ Path::new(path!("/root/b")).into(),
+ ]
+ );
+
+ let active_repo_path = project
+ .read_with(cx, |p, cx| {
+ p.active_repository(cx)
+ .map(|r| r.read(cx).work_directory_abs_path.clone())
+ })
+ .unwrap();
+ assert_eq!(active_repo_path.as_ref(), Path::new(path!("/root/a")));
+
+ let worktree_id = worktree_id_by_abs_path
+ .get(Path::new(path!("/root/a")))
+ .unwrap();
+ project.update(cx, |project, cx| {
+ project.remove_worktree(*worktree_id, cx);
+ });
+ cx.run_until_parked();
+
+ let active_repo_path = project
+ .read_with(cx, |p, cx| {
+ p.active_repository(cx)
+ .map(|r| r.read(cx).work_directory_abs_path.clone())
+ })
+ .unwrap();
+ assert_eq!(active_repo_path.as_ref(), Path::new(path!("/root/b")));
+
+ let worktree_id = worktree_id_by_abs_path
+ .get(Path::new(path!("/root/b")))
+ .unwrap();
+ project.update(cx, |project, cx| {
+ project.remove_worktree(*worktree_id, cx);
+ });
+ cx.run_until_parked();
+
+ let active_repo_path = project.read_with(cx, |p, cx| {
+ p.active_repository(cx)
+ .map(|r| r.read(cx).work_directory_abs_path.clone())
+ });
+ assert!(active_repo_path.is_none());
+}
@@ -347,6 +347,7 @@ message Commit {
string message = 6;
optional CommitOptions options = 7;
reserved 8;
+ uint64 askpass_id = 9;
message CommitOptions {
bool amend = 1;
@@ -486,10 +486,10 @@ impl remote::RemoteClientDelegate for RemoteClientDelegate {
let this = self.clone();
cx.spawn(async move |cx| {
AutoUpdater::download_remote_server_release(
- platform.os,
- platform.arch,
release_channel,
version,
+ platform.os,
+ platform.arch,
move |status, cx| this.set_status(Some(status), cx),
cx,
)
@@ -507,19 +507,19 @@ impl remote::RemoteClientDelegate for RemoteClientDelegate {
})
}
- fn get_download_params(
+ fn get_download_url(
&self,
platform: RemotePlatform,
release_channel: ReleaseChannel,
version: Option<SemanticVersion>,
cx: &mut AsyncApp,
- ) -> Task<Result<Option<(String, String)>>> {
+ ) -> Task<Result<Option<String>>> {
cx.spawn(async move |cx| {
AutoUpdater::get_remote_server_release_url(
- platform.os,
- platform.arch,
release_channel,
version,
+ platform.os,
+ platform.arch,
cx,
)
.await
@@ -126,6 +126,12 @@ pub fn init(app_version: SemanticVersion, cx: &mut App) {
cx.set_global(GlobalReleaseChannel(*RELEASE_CHANNEL))
}
+/// Initializes the release channel for tests that rely on fake release channel.
+pub fn init_test(app_version: SemanticVersion, release_channel: ReleaseChannel, cx: &mut App) {
+ cx.set_global(GlobalAppVersion(app_version));
+ cx.set_global(GlobalReleaseChannel(release_channel))
+}
+
impl ReleaseChannel {
/// Returns the global [`ReleaseChannel`].
pub fn global(cx: &App) -> Self {
@@ -67,13 +67,13 @@ pub trait RemoteClientDelegate: Send + Sync {
tx: oneshot::Sender<EncryptedPassword>,
cx: &mut AsyncApp,
);
- fn get_download_params(
+ fn get_download_url(
&self,
platform: RemotePlatform,
release_channel: ReleaseChannel,
version: Option<SemanticVersion>,
cx: &mut AsyncApp,
- ) -> Task<Result<Option<(String, String)>>>;
+ ) -> Task<Result<Option<String>>>;
fn download_server_binary_locally(
&self,
platform: RemotePlatform,
@@ -1669,13 +1669,13 @@ mod fake {
unreachable!()
}
- fn get_download_params(
+ fn get_download_url(
&self,
_platform: RemotePlatform,
_release_channel: ReleaseChannel,
_version: Option<SemanticVersion>,
_cx: &mut AsyncApp,
- ) -> Task<Result<Option<(String, String)>>> {
+ ) -> Task<Result<Option<String>>> {
unreachable!()
}
@@ -606,12 +606,12 @@ impl SshRemoteConnection {
.unwrap(),
);
if !self.socket.connection_options.upload_binary_over_ssh
- && let Some((url, body)) = delegate
- .get_download_params(self.ssh_platform, release_channel, wanted_version, cx)
+ && let Some(url) = delegate
+ .get_download_url(self.ssh_platform, release_channel, wanted_version, cx)
.await?
{
match self
- .download_binary_on_server(&url, &body, &tmp_path_gz, delegate, cx)
+ .download_binary_on_server(&url, &tmp_path_gz, delegate, cx)
.await
{
Ok(_) => {
@@ -644,7 +644,6 @@ impl SshRemoteConnection {
async fn download_binary_on_server(
&self,
url: &str,
- body: &str,
tmp_path_gz: &RelPath,
delegate: &Arc<dyn RemoteClientDelegate>,
cx: &mut AsyncApp,
@@ -670,12 +669,6 @@ impl SshRemoteConnection {
&[
"-f",
"-L",
- "-X",
- "GET",
- "-H",
- "Content-Type: application/json",
- "-d",
- body,
url,
"-o",
&tmp_path_gz.display(self.path_style()),
@@ -700,14 +693,7 @@ impl SshRemoteConnection {
.run_command(
self.ssh_shell_kind,
"wget",
- &[
- "--header=Content-Type: application/json",
- "--body-data",
- body,
- url,
- "-O",
- &tmp_path_gz.display(self.path_style()),
- ],
+ &[url, "-O", &tmp_path_gz.display(self.path_style())],
true,
)
.await
@@ -19,6 +19,7 @@ use std::{
time::Instant,
};
use util::{
+ ResultExt as _,
paths::{PathStyle, RemotePathBuf},
rel_path::RelPath,
shell::ShellKind,
@@ -79,20 +80,27 @@ impl WslRemoteConnection {
can_exec: true,
};
delegate.set_status(Some("Detecting WSL environment"), cx);
- this.shell = this.detect_shell().await?;
+ this.shell = this
+ .detect_shell()
+ .await
+ .context("failed detecting shell")?;
this.shell_kind = ShellKind::new(&this.shell, false);
- this.can_exec = this.detect_can_exec().await?;
- this.platform = this.detect_platform().await?;
+ this.can_exec = this.detect_can_exec().await;
+ this.platform = this
+ .detect_platform()
+ .await
+ .context("failed detecting platform")?;
this.remote_binary_path = Some(
this.ensure_server_binary(&delegate, release_channel, version, commit, cx)
- .await?,
+ .await
+ .context("failed ensuring server binary")?,
);
log::debug!("Detected WSL environment: {this:#?}");
Ok(this)
}
- async fn detect_can_exec(&self) -> Result<bool> {
+ async fn detect_can_exec(&self) -> bool {
let options = &self.connection_options;
let program = self.shell_kind.prepend_command_prefix("uname");
let args = &["-m"];
@@ -101,10 +109,13 @@ impl WslRemoteConnection {
.await;
if !output.is_ok_and(|output| output.status.success()) {
- run_wsl_command_impl(options, &program, args, false).await?;
- Ok(false)
+ run_wsl_command_impl(options, &program, args, false)
+ .await
+ .context("failed detecting exec status")
+ .log_err();
+ false
} else {
- Ok(true)
+ true
}
}
async fn detect_platform(&self) -> Result<RemotePlatform> {
@@ -504,7 +515,9 @@ impl RemoteConnection for WslRemoteConnection {
/// `wslpath` is an executable available in WSL; it's a Linux binary.
/// So it doesn't support Windows style paths.
async fn sanitize_path(path: &Path) -> Result<String> {
- let path = smol::fs::canonicalize(path).await?;
+ let path = smol::fs::canonicalize(path)
+ .await
+ .with_context(|| format!("Failed to canonicalize path {}", path.display()))?;
let path_str = path.to_string_lossy();
let sanitized = path_str.strip_prefix(r"\\?\").unwrap_or(&path_str);
@@ -526,14 +539,16 @@ async fn run_wsl_command_impl(
args: &[&str],
exec: bool,
) -> Result<String> {
- let output = wsl_command_impl(options, program, args, exec)
+ let mut command = wsl_command_impl(options, program, args, exec);
+ let output = command
.output()
- .await?;
+ .await
+ .with_context(|| format!("Failed to run command '{:?}'", command))?;
if !output.status.success() {
return Err(anyhow!(
- "Command '{}' failed: {}",
- program,
+ "Command '{:?}' failed: {}",
+ command,
String::from_utf8_lossy(&output.stderr).trim()
));
}
@@ -1662,7 +1662,7 @@ async fn test_remote_git_branches(cx: &mut TestAppContext, server_cx: &mut TestA
// Also try creating a new branch
cx.update(|cx| {
repository.update(cx, |repo, _cx| {
- repo.create_branch("totally-new-branch".to_string())
+ repo.create_branch("totally-new-branch".to_string(), None)
})
})
.await
@@ -127,12 +127,6 @@ pub struct BufferSearchBar {
regex_language: Option<Arc<Language>>,
}
-impl BufferSearchBar {
- pub fn query_editor_focused(&self) -> bool {
- self.query_editor_focused
- }
-}
-
impl EventEmitter<Event> for BufferSearchBar {}
impl EventEmitter<workspace::ToolbarItemEvent> for BufferSearchBar {}
impl Render for BufferSearchBar {
@@ -521,6 +515,10 @@ impl ToolbarItemView for BufferSearchBar {
}
impl BufferSearchBar {
+ pub fn query_editor_focused(&self) -> bool {
+ self.query_editor_focused
+ }
+
pub fn register(registrar: &mut impl SearchActionsRegistrar) {
registrar.register_handler(ForDeployed(|this, _: &FocusSearch, window, cx| {
this.query_editor.focus_handle(cx).focus(window);
@@ -696,6 +694,8 @@ impl BufferSearchBar {
pub fn dismiss(&mut self, _: &Dismiss, window: &mut Window, cx: &mut Context<Self>) {
self.dismissed = true;
self.query_error = None;
+ self.sync_select_next_case_sensitivity(cx);
+
for searchable_item in self.searchable_items_with_matches.keys() {
if let Some(searchable_item) =
WeakSearchableItemHandle::upgrade(searchable_item.as_ref(), cx)
@@ -711,6 +711,7 @@ impl BufferSearchBar {
let handle = active_editor.item_focus_handle(cx);
self.focus(&handle, window);
}
+
cx.emit(Event::UpdateLocation);
cx.emit(ToolbarItemEvent::ChangeLocation(
ToolbarItemLocation::Hidden,
@@ -730,6 +731,7 @@ impl BufferSearchBar {
}
self.search_suggested(window, cx);
self.smartcase(window, cx);
+ self.sync_select_next_case_sensitivity(cx);
self.replace_enabled = deploy.replace_enabled;
self.selection_search_enabled = if deploy.selection_search_enabled {
Some(FilteredSearchRange::Default)
@@ -919,6 +921,7 @@ impl BufferSearchBar {
self.default_options = self.search_options;
drop(self.update_matches(false, false, window, cx));
self.adjust_query_regex_language(cx);
+ self.sync_select_next_case_sensitivity(cx);
cx.notify();
}
@@ -953,6 +956,7 @@ impl BufferSearchBar {
pub fn set_search_options(&mut self, search_options: SearchOptions, cx: &mut Context<Self>) {
self.search_options = search_options;
self.adjust_query_regex_language(cx);
+ self.sync_select_next_case_sensitivity(cx);
cx.notify();
}
@@ -1507,6 +1511,7 @@ impl BufferSearchBar {
.read(cx)
.as_singleton()
.expect("query editor should be backed by a singleton buffer");
+
if enable {
if let Some(regex_language) = self.regex_language.clone() {
query_buffer.update(cx, |query_buffer, cx| {
@@ -1519,6 +1524,24 @@ impl BufferSearchBar {
})
}
}
+
+ /// Updates the searchable item's case sensitivity option to match the
+ /// search bar's current case sensitivity setting. This ensures that
+    /// editor's `select_next`/`select_previous` operations respect the buffer
+ /// search bar's search options.
+ ///
+ /// Clears the case sensitivity when the search bar is dismissed so that
+ /// only the editor's settings are respected.
+ fn sync_select_next_case_sensitivity(&self, cx: &mut Context<Self>) {
+ let case_sensitive = match self.dismissed {
+ true => None,
+ false => Some(self.search_options.contains(SearchOptions::CASE_SENSITIVE)),
+ };
+
+ if let Some(active_searchable_item) = self.active_searchable_item.as_ref() {
+ active_searchable_item.set_search_is_case_sensitive(case_sensitive, cx);
+ }
+ }
}
#[cfg(test)]
@@ -1528,7 +1551,7 @@ mod tests {
use super::*;
use editor::{
DisplayPoint, Editor, MultiBuffer, SearchSettings, SelectionEffects,
- display_map::DisplayRow,
+ display_map::DisplayRow, test::editor_test_context::EditorTestContext,
};
use gpui::{Hsla, TestAppContext, UpdateGlobal, VisualTestContext};
use language::{Buffer, Point};
@@ -2963,6 +2986,61 @@ mod tests {
});
}
+ #[gpui::test]
+ async fn test_select_occurrence_case_sensitivity(cx: &mut TestAppContext) {
+ let (editor, search_bar, cx) = init_test(cx);
+ let mut editor_cx = EditorTestContext::for_editor_in(editor, cx).await;
+
+ // Start with case sensitive search settings.
+ let mut search_settings = SearchSettings::default();
+ search_settings.case_sensitive = true;
+ update_search_settings(search_settings, cx);
+ search_bar.update(cx, |search_bar, cx| {
+ let mut search_options = search_bar.search_options;
+ search_options.insert(SearchOptions::CASE_SENSITIVE);
+ search_bar.set_search_options(search_options, cx);
+ });
+
+ editor_cx.set_state("Β«ΛfooΒ»\nFOO\nFoo\nfoo");
+ editor_cx.update_editor(|e, window, cx| {
+ e.select_next(&Default::default(), window, cx).unwrap();
+ });
+ editor_cx.assert_editor_state("Β«ΛfooΒ»\nFOO\nFoo\nΒ«ΛfooΒ»");
+
+        // Update the search bar's case sensitivity toggle, so we can later
+ // confirm that `select_next` will now be case-insensitive.
+ editor_cx.set_state("Β«ΛfooΒ»\nFOO\nFoo\nfoo");
+ search_bar.update_in(cx, |search_bar, window, cx| {
+ search_bar.toggle_case_sensitive(&Default::default(), window, cx);
+ });
+ editor_cx.update_editor(|e, window, cx| {
+ e.select_next(&Default::default(), window, cx).unwrap();
+ });
+ editor_cx.assert_editor_state("Β«ΛfooΒ»\nΒ«ΛFOOΒ»\nFoo\nfoo");
+
+ // Confirm that, after dismissing the search bar, only the editor's
+ // search settings actually affect the behavior of `select_next`.
+ search_bar.update_in(cx, |search_bar, window, cx| {
+ search_bar.dismiss(&Default::default(), window, cx);
+ });
+ editor_cx.set_state("Β«ΛfooΒ»\nFOO\nFoo\nfoo");
+ editor_cx.update_editor(|e, window, cx| {
+ e.select_next(&Default::default(), window, cx).unwrap();
+ });
+ editor_cx.assert_editor_state("Β«ΛfooΒ»\nFOO\nFoo\nΒ«ΛfooΒ»");
+
+ // Update the editor's search settings, disabling case sensitivity, to
+ // check that the value is respected.
+ let mut search_settings = SearchSettings::default();
+ search_settings.case_sensitive = false;
+ update_search_settings(search_settings, cx);
+ editor_cx.set_state("Β«ΛfooΒ»\nFOO\nFoo\nfoo");
+ editor_cx.update_editor(|e, window, cx| {
+ e.select_next(&Default::default(), window, cx).unwrap();
+ });
+ editor_cx.assert_editor_state("Β«ΛfooΒ»\nΒ«ΛFOOΒ»\nFoo\nfoo");
+ }
+
fn update_search_settings(search_settings: SearchSettings, cx: &mut TestAppContext) {
cx.update(|cx| {
SettingsStore::update_global(cx, |store, cx| {
@@ -176,6 +176,8 @@ pub struct AgentProfileContent {
pub enable_all_context_servers: Option<bool>,
#[serde(default)]
pub context_servers: IndexMap<Arc<str>, ContextServerPresetContent>,
+ /// The default language model selected when using this profile.
+ pub default_model: Option<LanguageModelSelection>,
}
#[skip_serializing_none]
@@ -96,6 +96,10 @@ pub struct EditorSettingsContent {
/// Default: 4.0
#[serde(serialize_with = "crate::serialize_optional_f32_with_two_decimal_places")]
pub fast_scroll_sensitivity: Option<f32>,
+ /// Settings for sticking scopes to the top of the editor.
+ ///
+ /// Default: sticky scroll is disabled
+ pub sticky_scroll: Option<StickyScrollContent>,
/// Whether the line numbers on editors gutter are relative or not.
/// When "enabled" shows relative number of buffer lines, when "wrapped" shows
/// relative number of display lines.
@@ -312,6 +316,16 @@ pub struct ScrollbarContent {
pub axes: Option<ScrollbarAxesContent>,
}
+/// Sticky scroll related settings
+#[skip_serializing_none]
+#[derive(Clone, Default, Debug, Serialize, Deserialize, JsonSchema, MergeFrom, PartialEq)]
+pub struct StickyScrollContent {
+ /// Whether sticky scroll is enabled.
+ ///
+ /// Default: false
+ pub enabled: Option<bool>,
+}
+
/// Minimap related settings
#[skip_serializing_none]
#[derive(Clone, Default, Debug, Serialize, Deserialize, JsonSchema, MergeFrom, PartialEq)]
@@ -745,9 +759,13 @@ pub enum SnippetSortOrder {
pub struct SearchSettingsContent {
/// Whether to show the project search button in the status bar.
pub button: Option<bool>,
+ /// Whether to only match on whole words.
pub whole_word: Option<bool>,
+ /// Whether to match case sensitively.
pub case_sensitive: Option<bool>,
+ /// Whether to include gitignored files in search results.
pub include_ignored: Option<bool>,
+ /// Whether to interpret the search query as a regular expression.
pub regex: Option<bool>,
/// Whether to center the cursor on each search match when navigating.
pub center_on_match: Option<bool>,
@@ -108,7 +108,17 @@ pub struct WorktreeSettingsContent {
#[serde(rename_all = "snake_case")]
pub struct LspSettings {
pub binary: Option<BinarySettings>,
+ /// Options passed to the language server at startup.
+ ///
+ /// Ref: https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification/#initialize
+ ///
+ /// Consult the documentation for the specific language server to see which settings are supported.
pub initialization_options: Option<serde_json::Value>,
+ /// Language server settings.
+ ///
+ /// Ref: https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification/#workspace_configuration
+ ///
+ /// Consult the documentation for the specific language server to see which settings are supported.
pub settings: Option<serde_json::Value>,
/// If the server supports sending tasks over LSP extensions,
/// this setting can be used to enable or disable them in Zed.
@@ -157,7 +157,7 @@ pub enum ThemeSelection {
Dynamic {
/// The mode used to determine which theme to use.
#[serde(default)]
- mode: ThemeMode,
+ mode: ThemeAppearanceMode,
/// The theme to use for light mode.
light: ThemeName,
/// The theme to use for dark mode.
@@ -186,7 +186,7 @@ pub enum IconThemeSelection {
Dynamic {
/// The mode used to determine which theme to use.
#[serde(default)]
- mode: ThemeMode,
+ mode: ThemeAppearanceMode,
/// The icon theme to use for light mode.
light: IconThemeName,
/// The icon theme to use for dark mode.
@@ -194,7 +194,6 @@ pub enum IconThemeSelection {
},
}
-// TODO: Rename ThemeMode -> ThemeAppearanceMode
/// The mode use to select a theme.
///
/// `Light` and `Dark` will select their respective themes.
@@ -215,7 +214,7 @@ pub enum IconThemeSelection {
strum::VariantNames,
)]
#[serde(rename_all = "snake_case")]
-pub enum ThemeMode {
+pub enum ThemeAppearanceMode {
/// Use the specified `light` theme.
Light,
@@ -255,6 +255,7 @@ impl VsCodeSettings {
excerpt_context_lines: None,
expand_excerpt_lines: None,
fast_scroll_sensitivity: self.read_f32("editor.fastScrollSensitivity"),
+ sticky_scroll: self.sticky_scroll_content(),
go_to_definition_fallback: None,
gutter: self.gutter_content(),
hide_mouse: None,
@@ -303,6 +304,12 @@ impl VsCodeSettings {
}
}
+ fn sticky_scroll_content(&self) -> Option<StickyScrollContent> {
+ skip_default(StickyScrollContent {
+ enabled: self.read_bool("editor.stickyScroll.enabled"),
+ })
+ }
+
fn gutter_content(&self) -> Option<GutterContent> {
skip_default(GutterContent {
line_numbers: self.read_enum("editor.lineNumbers", |s| match s {
@@ -747,7 +754,13 @@ impl VsCodeSettings {
let env = self
.read_value(&format!("terminal.integrated.env.{platform}"))
.and_then(|v| v.as_object())
- .map(|v| v.iter().map(|(k, v)| (k.clone(), v.to_string())).collect());
+ .map(|v| {
+ v.iter()
+ .map(|(k, v)| (k.clone(), v.to_string()))
+ // zed does not support substitutions, so this can break env vars
+ .filter(|(_, v)| !v.contains('$'))
+ .collect()
+ });
ProjectTerminalSettingsContent {
// TODO: handle arguments
@@ -300,9 +300,9 @@ pub(crate) fn settings_data(cx: &App) -> Vec<SettingsPage> {
settings::ThemeSelection::Static(_) => return,
settings::ThemeSelection::Dynamic { mode, light, dark } => {
match mode {
- theme::ThemeMode::Light => light.clone(),
- theme::ThemeMode::Dark => dark.clone(),
- theme::ThemeMode::System => dark.clone(), // no cx, can't determine correct choice
+ theme::ThemeAppearanceMode::Light => light.clone(),
+ theme::ThemeAppearanceMode::Dark => dark.clone(),
+ theme::ThemeAppearanceMode::System => dark.clone(), // no cx, can't determine correct choice
}
},
};
@@ -315,7 +315,7 @@ pub(crate) fn settings_data(cx: &App) -> Vec<SettingsPage> {
};
settings::ThemeSelection::Dynamic {
- mode: settings::ThemeMode::System,
+ mode: settings::ThemeAppearanceMode::System,
light: static_name.clone(),
dark: static_name,
}
@@ -470,9 +470,9 @@ pub(crate) fn settings_data(cx: &App) -> Vec<SettingsPage> {
settings::IconThemeSelection::Static(_) => return,
settings::IconThemeSelection::Dynamic { mode, light, dark } => {
match mode {
- theme::ThemeMode::Light => light.clone(),
- theme::ThemeMode::Dark => dark.clone(),
- theme::ThemeMode::System => dark.clone(), // no cx, can't determine correct choice
+ theme::ThemeAppearanceMode::Light => light.clone(),
+ theme::ThemeAppearanceMode::Dark => dark.clone(),
+ theme::ThemeAppearanceMode::System => dark.clone(), // no cx, can't determine correct choice
}
},
};
@@ -485,7 +485,7 @@ pub(crate) fn settings_data(cx: &App) -> Vec<SettingsPage> {
};
settings::IconThemeSelection::Dynamic {
- mode: settings::ThemeMode::System,
+ mode: settings::ThemeAppearanceMode::System,
light: static_name.clone(),
dark: static_name,
}
@@ -1352,6 +1352,21 @@ pub(crate) fn settings_data(cx: &App) -> Vec<SettingsPage> {
metadata: None,
files: USER,
}),
+ SettingsPageItem::SettingItem(SettingItem {
+ title: "Sticky Scroll",
+ description: "Whether to stick scopes to the top of the editor",
+ field: Box::new(SettingField {
+ json_path: Some("sticky_scroll.enabled"),
+ pick: |settings_content| {
+ settings_content.editor.sticky_scroll.as_ref().and_then(|sticky_scroll| sticky_scroll.enabled.as_ref())
+ },
+ write: |settings_content, value| {
+ settings_content.editor.sticky_scroll.get_or_insert_default().enabled = value;
+ },
+ }),
+ metadata: None,
+ files: USER,
+ }),
SettingsPageItem::SectionHeader("Signature Help"),
SettingsPageItem::SettingItem(SettingItem {
title: "Auto Signature Help",
@@ -486,7 +486,7 @@ fn init_renderers(cx: &mut App) {
.add_basic_renderer::<settings::PaneSplitDirectionVertical>(render_dropdown)
.add_basic_renderer::<settings::DocumentColorsRenderMode>(render_dropdown)
.add_basic_renderer::<settings::ThemeSelectionDiscriminants>(render_dropdown)
- .add_basic_renderer::<settings::ThemeMode>(render_dropdown)
+ .add_basic_renderer::<settings::ThemeAppearanceMode>(render_dropdown)
.add_basic_renderer::<settings::ThemeName>(render_theme_picker)
.add_basic_renderer::<settings::IconThemeSelectionDiscriminants>(render_dropdown)
.add_basic_renderer::<settings::IconThemeName>(render_icon_theme_picker)
@@ -46,7 +46,7 @@ impl AlacModifiers {
pub fn to_esc_str(
keystroke: &Keystroke,
mode: &TermMode,
- alt_is_meta: bool,
+ option_as_meta: bool,
) -> Option<Cow<'static, str>> {
let modifiers = AlacModifiers::new(keystroke);
@@ -218,7 +218,7 @@ pub fn to_esc_str(
}
}
- if alt_is_meta {
+ if !cfg!(target_os = "macos") || option_as_meta {
let is_alt_lowercase_ascii = modifiers == AlacModifiers::Alt && keystroke.key.is_ascii();
let is_alt_uppercase_ascii =
keystroke.modifiers.alt && keystroke.modifiers.shift && keystroke.key.is_ascii();
@@ -383,7 +383,7 @@ impl TerminalBuilder {
selection_phase: SelectionPhase::Ended,
hyperlink_regex_searches: RegexSearches::new(),
vi_mode_enabled: false,
- is_ssh_terminal: false,
+ is_remote_terminal: false,
last_mouse_move_time: Instant::now(),
last_hyperlink_search_position: None,
#[cfg(windows)]
@@ -415,7 +415,7 @@ impl TerminalBuilder {
cursor_shape: CursorShape,
alternate_scroll: AlternateScroll,
max_scroll_history_lines: Option<usize>,
- is_ssh_terminal: bool,
+ is_remote_terminal: bool,
window_id: u64,
completion_tx: Option<Sender<Option<ExitStatus>>>,
cx: &App,
@@ -601,7 +601,7 @@ impl TerminalBuilder {
selection_phase: SelectionPhase::Ended,
hyperlink_regex_searches: RegexSearches::new(),
vi_mode_enabled: false,
- is_ssh_terminal,
+ is_remote_terminal,
last_mouse_move_time: Instant::now(),
last_hyperlink_search_position: None,
#[cfg(windows)]
@@ -646,7 +646,7 @@ impl TerminalBuilder {
})
};
// the thread we spawn things on has an effect on signal handling
- if cfg!(target_os = "unix") {
+ if !cfg!(target_os = "windows") {
cx.spawn(async move |_| fut.await)
} else {
cx.background_spawn(fut)
@@ -828,7 +828,7 @@ pub struct Terminal {
hyperlink_regex_searches: RegexSearches,
task: Option<TaskState>,
vi_mode_enabled: bool,
- is_ssh_terminal: bool,
+ is_remote_terminal: bool,
last_mouse_move_time: Instant,
last_hyperlink_search_position: Option<Point<Pixels>>,
#[cfg(windows)]
@@ -1386,7 +1386,15 @@ impl Terminal {
/// (This is a no-op for display-only terminals.)
fn write_to_pty(&self, input: impl Into<Cow<'static, [u8]>>) {
if let TerminalType::Pty { pty_tx, .. } = &self.terminal_type {
- pty_tx.notify(input.into());
+ let input = input.into();
+ if log::log_enabled!(log::Level::Debug) {
+ if let Ok(str) = str::from_utf8(&input) {
+ log::debug!("Writing to PTY: {:?}", str);
+ } else {
+ log::debug!("Writing to PTY: {:?}", input);
+ }
+ }
+ pty_tx.notify(input);
}
}
@@ -1490,14 +1498,14 @@ impl Terminal {
}
}
- pub fn try_keystroke(&mut self, keystroke: &Keystroke, alt_is_meta: bool) -> bool {
+ pub fn try_keystroke(&mut self, keystroke: &Keystroke, option_as_meta: bool) -> bool {
if self.vi_mode_enabled {
self.vi_motion(keystroke);
return true;
}
// Keep default terminal behavior
- let esc = to_esc_str(keystroke, &self.last_content.mode, alt_is_meta);
+ let esc = to_esc_str(keystroke, &self.last_content.mode, option_as_meta);
if let Some(esc) = esc {
match esc {
Cow::Borrowed(string) => self.input(string.as_bytes()),
@@ -1959,7 +1967,7 @@ impl Terminal {
}
pub fn working_directory(&self) -> Option<PathBuf> {
- if self.is_ssh_terminal {
+ if self.is_remote_terminal {
// We can't yet reliably detect the working directory of a shell on the
// SSH host. Until we can do that, it doesn't make sense to display
// the working directory on the client and persist that.
@@ -2158,7 +2166,7 @@ impl Terminal {
self.template.cursor_shape,
self.template.alternate_scroll,
self.template.max_scroll_history_lines,
- self.is_ssh_terminal,
+ self.is_remote_terminal,
self.template.window_id,
None,
cx,
@@ -11,7 +11,7 @@ use gpui::{
use refineable::Refineable;
use schemars::JsonSchema;
use serde::{Deserialize, Serialize};
-pub use settings::{FontFamilyName, IconThemeName, ThemeMode, ThemeName};
+pub use settings::{FontFamilyName, IconThemeName, ThemeAppearanceMode, ThemeName};
use settings::{RegisterSetting, Settings, SettingsContent};
use std::sync::Arc;
@@ -208,7 +208,7 @@ pub enum ThemeSelection {
Dynamic {
/// The mode used to determine which theme to use.
#[serde(default)]
- mode: ThemeMode,
+ mode: ThemeAppearanceMode,
/// The theme to use for light mode.
light: ThemeName,
/// The theme to use for dark mode.
@@ -233,9 +233,9 @@ impl ThemeSelection {
match self {
Self::Static(theme) => theme.clone(),
Self::Dynamic { mode, light, dark } => match mode {
- ThemeMode::Light => light.clone(),
- ThemeMode::Dark => dark.clone(),
- ThemeMode::System => match system_appearance {
+ ThemeAppearanceMode::Light => light.clone(),
+ ThemeAppearanceMode::Dark => dark.clone(),
+ ThemeAppearanceMode::System => match system_appearance {
Appearance::Light => light.clone(),
Appearance::Dark => dark.clone(),
},
@@ -244,7 +244,7 @@ impl ThemeSelection {
}
/// Returns the [ThemeMode] for the [ThemeSelection].
- pub fn mode(&self) -> Option<ThemeMode> {
+ pub fn mode(&self) -> Option<ThemeAppearanceMode> {
match self {
ThemeSelection::Static(_) => None,
ThemeSelection::Dynamic { mode, .. } => Some(*mode),
@@ -260,7 +260,7 @@ pub enum IconThemeSelection {
/// A dynamic icon theme selection, which can change based on the [`ThemeMode`].
Dynamic {
/// The mode used to determine which theme to use.
- mode: ThemeMode,
+ mode: ThemeAppearanceMode,
/// The icon theme to use for light mode.
light: IconThemeName,
/// The icon theme to use for dark mode.
@@ -285,9 +285,9 @@ impl IconThemeSelection {
match self {
Self::Static(theme) => theme.clone(),
Self::Dynamic { mode, light, dark } => match mode {
- ThemeMode::Light => light.clone(),
- ThemeMode::Dark => dark.clone(),
- ThemeMode::System => match system_appearance {
+ ThemeAppearanceMode::Light => light.clone(),
+ ThemeAppearanceMode::Dark => dark.clone(),
+ ThemeAppearanceMode::System => match system_appearance {
Appearance::Light => light.clone(),
Appearance::Dark => dark.clone(),
},
@@ -296,7 +296,7 @@ impl IconThemeSelection {
}
/// Returns the [`ThemeMode`] for the [`IconThemeSelection`].
- pub fn mode(&self) -> Option<ThemeMode> {
+ pub fn mode(&self) -> Option<ThemeAppearanceMode> {
match self {
IconThemeSelection::Static(_) => None,
IconThemeSelection::Dynamic { mode, .. } => Some(*mode),
@@ -315,9 +315,9 @@ pub fn set_theme(
let theme_to_update = match selection {
settings::ThemeSelection::Static(theme) => theme,
settings::ThemeSelection::Dynamic { mode, light, dark } => match mode {
- ThemeMode::Light => light,
- ThemeMode::Dark => dark,
- ThemeMode::System => match appearance {
+ ThemeAppearanceMode::Light => light,
+ ThemeAppearanceMode::Dark => dark,
+ ThemeAppearanceMode::System => match appearance {
Appearance::Light => light,
Appearance::Dark => dark,
},
@@ -342,9 +342,9 @@ pub fn set_icon_theme(
let icon_theme_to_update = match selection {
settings::IconThemeSelection::Static(theme) => theme,
settings::IconThemeSelection::Dynamic { mode, light, dark } => match mode {
- ThemeMode::Light => light,
- ThemeMode::Dark => dark,
- ThemeMode::System => match appearance {
+ ThemeAppearanceMode::Light => light,
+ ThemeAppearanceMode::Dark => dark,
+ ThemeAppearanceMode::System => match appearance {
Appearance::Light => light,
Appearance::Dark => dark,
},
@@ -358,7 +358,7 @@ pub fn set_icon_theme(
}
/// Sets the mode for the theme.
-pub fn set_mode(content: &mut SettingsContent, mode: ThemeMode) {
+pub fn set_mode(content: &mut SettingsContent, mode: ThemeAppearanceMode) {
let theme = content.theme.as_mut();
if let Some(selection) = theme.theme.as_mut() {
@@ -227,6 +227,15 @@ impl SystemWindowTabs {
window.activate_window();
});
})
+ .on_mouse_up(MouseButton::Middle, move |_, window, cx| {
+ if item.handle.window_id() == window.window_handle().window_id() {
+ window.dispatch_action(Box::new(CloseWindow), cx);
+ } else {
+ let _ = item.handle.update(cx, |_, window, cx| {
+ window.dispatch_action(Box::new(CloseWindow), cx);
+ });
+ }
+ })
.child(label)
.map(|this| match show_close_button {
ShowCloseButton::Hidden => this,
@@ -725,6 +725,8 @@ struct VimCommand {
args: Option<
Box<dyn Fn(Box<dyn Action>, String) -> Option<Box<dyn Action>> + Send + Sync + 'static>,
>,
+ /// Optional range Range to use if no range is specified.
+ default_range: Option<CommandRange>,
range: Option<
Box<
dyn Fn(Box<dyn Action>, &CommandRange) -> Option<Box<dyn Action>>
@@ -793,6 +795,11 @@ impl VimCommand {
self
}
+ fn default_range(mut self, range: CommandRange) -> Self {
+ self.default_range = Some(range);
+ self
+ }
+
fn count(mut self) -> Self {
self.has_count = true;
self
@@ -923,6 +930,7 @@ impl VimCommand {
self.args.as_ref()?(action, args)?
};
+ let range = range.as_ref().or(self.default_range.as_ref());
if let Some(range) = range {
self.range.as_ref().and_then(|f| f(action, range))
} else {
@@ -1121,6 +1129,7 @@ impl CommandRange {
self.end.as_ref().unwrap_or(&self.start)
}
+ /// Convert the `CommandRange` into a `Range<MultiBufferRow>`.
pub(crate) fn buffer_range(
&self,
vim: &Vim,
@@ -1152,6 +1161,14 @@ impl CommandRange {
None
}
}
+
+ /// The `CommandRange` representing the entire buffer.
+ fn buffer() -> Self {
+ Self {
+ start: Position::Line { row: 1, offset: 0 },
+ end: Some(Position::LastLine { offset: 0 }),
+ }
+ }
}
fn generate_commands(_: &App) -> Vec<VimCommand> {
@@ -1421,8 +1438,12 @@ fn generate_commands(_: &App) -> Vec<VimCommand> {
VimCommand::new(("delm", "arks"), ArgumentRequired)
.bang(DeleteMarks::AllLocal)
.args(|_, args| Some(DeleteMarks::Marks(args).boxed_clone())),
- VimCommand::new(("sor", "t"), SortLinesCaseSensitive).range(select_range),
- VimCommand::new(("sort i", ""), SortLinesCaseInsensitive).range(select_range),
+ VimCommand::new(("sor", "t"), SortLinesCaseSensitive)
+ .range(select_range)
+ .default_range(CommandRange::buffer()),
+ VimCommand::new(("sort i", ""), SortLinesCaseInsensitive)
+ .range(select_range)
+ .default_range(CommandRange::buffer()),
VimCommand::str(("E", "xplore"), "project_panel::ToggleFocus"),
VimCommand::str(("H", "explore"), "project_panel::ToggleFocus"),
VimCommand::str(("L", "explore"), "project_panel::ToggleFocus"),
@@ -2898,4 +2919,112 @@ mod test {
);
});
}
+
+ #[gpui::test]
+ async fn test_sort_commands(cx: &mut TestAppContext) {
+ let mut cx = VimTestContext::new(cx, true).await;
+
+ cx.set_state(
+ indoc! {"
+ Β«hornet
+ quirrel
+ elderbug
+ cornifer
+ idaΛΒ»
+ "},
+ Mode::Visual,
+ );
+
+ cx.simulate_keystrokes(": sort");
+ cx.simulate_keystrokes("enter");
+
+ cx.assert_state(
+ indoc! {"
+ Λcornifer
+ elderbug
+ hornet
+ ida
+ quirrel
+ "},
+ Mode::Normal,
+ );
+
+ // Assert that, by default, `:sort` takes case into consideration.
+ cx.set_state(
+ indoc! {"
+ Β«hornet
+ quirrel
+ Elderbug
+ cornifer
+ idaΛΒ»
+ "},
+ Mode::Visual,
+ );
+
+ cx.simulate_keystrokes(": sort");
+ cx.simulate_keystrokes("enter");
+
+ cx.assert_state(
+ indoc! {"
+ ΛElderbug
+ cornifer
+ hornet
+ ida
+ quirrel
+ "},
+ Mode::Normal,
+ );
+
+ // Assert that, if the `i` option is passed, `:sort` ignores case.
+ cx.set_state(
+ indoc! {"
+ Β«hornet
+ quirrel
+ Elderbug
+ cornifer
+ idaΛΒ»
+ "},
+ Mode::Visual,
+ );
+
+ cx.simulate_keystrokes(": sort space i");
+ cx.simulate_keystrokes("enter");
+
+ cx.assert_state(
+ indoc! {"
+ Λcornifer
+ Elderbug
+ hornet
+ ida
+ quirrel
+ "},
+ Mode::Normal,
+ );
+
+ // When no range is provided, sorts the whole buffer.
+ cx.set_state(
+ indoc! {"
+ Λhornet
+ quirrel
+ elderbug
+ cornifer
+ ida
+ "},
+ Mode::Normal,
+ );
+
+ cx.simulate_keystrokes(": sort");
+ cx.simulate_keystrokes("enter");
+
+ cx.assert_state(
+ indoc! {"
+ Λcornifer
+ elderbug
+ hornet
+ ida
+ quirrel
+ "},
+ Mode::Normal,
+ );
+ }
}
@@ -76,17 +76,18 @@ impl Vim {
Point::new(row, snapshot.line_len(multi_buffer::MultiBufferRow(row)))
};
- let number_result = if !selection.is_empty() {
- find_number_in_range(&snapshot, start, end)
+ let find_result = if !selection.is_empty() {
+ find_target(&snapshot, start, end, true)
} else {
- find_number(&snapshot, start)
+ find_target(&snapshot, start, end, false)
};
- if let Some((range, num, radix)) = number_result {
+ if let Some((range, target, radix)) = find_result {
let replace = match radix {
- 10 => increment_decimal_string(&num, delta),
- 16 => increment_hex_string(&num, delta),
- 2 => increment_binary_string(&num, delta),
+ 10 => increment_decimal_string(&target, delta),
+ 16 => increment_hex_string(&target, delta),
+ 2 => increment_binary_string(&target, delta),
+ 0 => increment_toggle_string(&target),
_ => unreachable!(),
};
delta += step as i64;
@@ -94,13 +95,6 @@ impl Vim {
if selection.is_empty() {
new_anchors.push((false, snapshot.anchor_after(range.end)))
}
- } else if let Some((range, boolean)) = find_boolean(&snapshot, start) {
- let replace = toggle_boolean(&boolean);
- delta += step as i64;
- edits.push((range.clone(), replace));
- if selection.is_empty() {
- new_anchors.push((false, snapshot.anchor_after(range.end)))
- }
} else if selection.is_empty() {
new_anchors.push((true, snapshot.anchor_after(start)))
}
@@ -200,83 +194,127 @@ fn increment_binary_string(num: &str, delta: i64) -> String {
format!("{:0width$b}", result, width = num.len())
}
-fn find_number_in_range(
+fn find_target(
snapshot: &MultiBufferSnapshot,
start: Point,
end: Point,
+ need_range: bool,
) -> Option<(Range<Point>, String, u32)> {
let start_offset = start.to_offset(snapshot);
let end_offset = end.to_offset(snapshot);
let mut offset = start_offset;
+ let mut first_char_is_num = snapshot
+ .chars_at(offset)
+ .next()
+ .map_or(false, |ch| ch.is_ascii_hexdigit());
+ let mut pre_char = String::new();
// Backward scan to find the start of the number, but stop at start_offset
- for ch in snapshot.reversed_chars_at(offset) {
- if ch.is_ascii_hexdigit() || ch == '-' || ch == 'b' || ch == 'x' {
- if offset == 0 {
- break;
- }
- offset -= ch.len_utf8();
- if offset < start_offset {
- offset = start_offset;
+ for ch in snapshot.reversed_chars_at(offset + 1) {
+ // Search boundaries
+ if offset == 0 || ch.is_whitespace() || (need_range && offset <= start_offset) {
+ break;
+ }
+
+ // Avoid the influence of hexadecimal letters
+ if first_char_is_num
+ && !ch.is_ascii_hexdigit()
+ && (ch != 'b' && ch != 'B')
+ && (ch != 'x' && ch != 'X')
+ && ch != '-'
+ {
+ // Used to determine if the initial character is a number.
+ if is_numeric_string(&pre_char) {
break;
+ } else {
+ first_char_is_num = false;
}
- } else {
- break;
}
+
+ pre_char.insert(0, ch);
+ offset -= ch.len_utf8();
}
let mut begin = None;
- let mut end_num = None;
- let mut num = String::new();
+ let mut end = None;
+ let mut target = String::new();
let mut radix = 10;
+ let mut is_num = false;
let mut chars = snapshot.chars_at(offset).peekable();
while let Some(ch) = chars.next() {
- if offset >= end_offset {
+ if need_range && offset >= end_offset {
break; // stop at end of selection
}
- if num == "0" && ch == 'b' && chars.peek().is_some() && chars.peek().unwrap().is_digit(2) {
+ if target == "0"
+ && (ch == 'b' || ch == 'B')
+ && chars.peek().is_some()
+ && chars.peek().unwrap().is_digit(2)
+ {
radix = 2;
begin = None;
- num = String::new();
- } else if num == "0"
- && ch == 'x'
+ target = String::new();
+ } else if target == "0"
+ && (ch == 'x' || ch == 'X')
&& chars.peek().is_some()
&& chars.peek().unwrap().is_ascii_hexdigit()
{
radix = 16;
begin = None;
- num = String::new();
- }
-
- if ch.is_digit(radix)
- || (begin.is_none()
+ target = String::new();
+ } else if ch == '.' {
+ is_num = false;
+ begin = None;
+ target = String::new();
+ } else if ch.is_digit(radix)
+ || ((begin.is_none() || !is_num)
&& ch == '-'
&& chars.peek().is_some()
&& chars.peek().unwrap().is_digit(radix))
{
+ if !is_num {
+ is_num = true;
+ begin = Some(offset);
+ target = String::new();
+ } else if begin.is_none() {
+ begin = Some(offset);
+ }
+ target.push(ch);
+ } else if ch.is_ascii_alphabetic() && !is_num {
if begin.is_none() {
begin = Some(offset);
}
- num.push(ch);
- } else if begin.is_some() {
- end_num = Some(offset);
+ target.push(ch);
+ } else if begin.is_some() && (is_num || !is_num && is_toggle_word(&target)) {
+ // End of matching
+ end = Some(offset);
break;
} else if ch == '\n' {
break;
+ } else {
+ // To match the next word
+ is_num = false;
+ begin = None;
+ target = String::new();
}
offset += ch.len_utf8();
}
- if let Some(begin) = begin {
- let end_num = end_num.unwrap_or(offset);
+ if let Some(begin) = begin
+ && (is_num || !is_num && is_toggle_word(&target))
+ {
+ if !is_num {
+ radix = 0;
+ }
+
+ let end = end.unwrap_or(offset);
Some((
- begin.to_point(snapshot)..end_num.to_point(snapshot),
- num,
+ begin.to_point(snapshot)..end.to_point(snapshot),
+ target,
radix,
))
} else {
@@ -284,133 +322,38 @@ fn find_number_in_range(
}
}
-fn find_number(
- snapshot: &MultiBufferSnapshot,
- start: Point,
-) -> Option<(Range<Point>, String, u32)> {
- let mut offset = start.to_offset(snapshot);
-
- let ch0 = snapshot.chars_at(offset).next();
- if ch0.as_ref().is_some_and(char::is_ascii_hexdigit) || matches!(ch0, Some('-' | 'b' | 'x')) {
- // go backwards to the start of any number the selection is within
- for ch in snapshot.reversed_chars_at(offset) {
- if ch.is_ascii_hexdigit() || ch == '-' || ch == 'b' || ch == 'x' {
- offset -= ch.len_utf8();
- continue;
- }
- break;
- }
+fn is_numeric_string(s: &str) -> bool {
+ if s.is_empty() {
+ return false;
}
- let mut begin = None;
- let mut end = None;
- let mut num = String::new();
- let mut radix = 10;
-
- let mut chars = snapshot.chars_at(offset).peekable();
- // find the next number on the line (may start after the original cursor position)
- while let Some(ch) = chars.next() {
- if num == "0" && ch == 'b' && chars.peek().is_some() && chars.peek().unwrap().is_digit(2) {
- radix = 2;
- begin = None;
- num = String::new();
- }
- if num == "0"
- && ch == 'x'
- && chars.peek().is_some()
- && chars.peek().unwrap().is_ascii_hexdigit()
- {
- radix = 16;
- begin = None;
- num = String::new();
- }
+ let (_, rest) = if let Some(r) = s.strip_prefix('-') {
+ (true, r)
+ } else {
+ (false, s)
+ };
- if ch.is_digit(radix)
- || (begin.is_none()
- && ch == '-'
- && chars.peek().is_some()
- && chars.peek().unwrap().is_digit(radix))
- {
- if begin.is_none() {
- begin = Some(offset);
- }
- num.push(ch);
- } else if begin.is_some() {
- end = Some(offset);
- break;
- } else if ch == '\n' {
- break;
- }
- offset += ch.len_utf8();
+ if rest.is_empty() {
+ return false;
}
- if let Some(begin) = begin {
- let end = end.unwrap_or(offset);
- Some((begin.to_point(snapshot)..end.to_point(snapshot), num, radix))
+
+ if let Some(digits) = rest.strip_prefix("0b").or_else(|| rest.strip_prefix("0B")) {
+ digits.is_empty() || digits.chars().all(|c| c == '0' || c == '1')
+ } else if let Some(digits) = rest.strip_prefix("0x").or_else(|| rest.strip_prefix("0X")) {
+ digits.is_empty() || digits.chars().all(|c| c.is_ascii_hexdigit())
} else {
- None
+ !rest.is_empty() && rest.chars().all(|c| c.is_ascii_digit())
}
}
-fn find_boolean(snapshot: &MultiBufferSnapshot, start: Point) -> Option<(Range<Point>, String)> {
- let mut offset = start.to_offset(snapshot);
-
- let ch0 = snapshot.chars_at(offset).next();
- if ch0.as_ref().is_some_and(|c| c.is_ascii_alphabetic()) {
- for ch in snapshot.reversed_chars_at(offset) {
- if ch.is_ascii_alphabetic() {
- offset -= ch.len_utf8();
- continue;
- }
- break;
- }
- }
-
- let mut begin = None;
- let mut end = None;
- let mut word = String::new();
-
- let chars = snapshot.chars_at(offset);
-
- for ch in chars {
- if ch.is_ascii_alphabetic() {
- if begin.is_none() {
- begin = Some(offset);
- }
- word.push(ch);
- } else if begin.is_some() {
- end = Some(offset);
- let word_lower = word.to_lowercase();
- if BOOLEAN_PAIRS
- .iter()
- .any(|(a, b)| word_lower == *a || word_lower == *b)
- {
- return Some((
- begin.unwrap().to_point(snapshot)..end.unwrap().to_point(snapshot),
- word,
- ));
- }
- begin = None;
- end = None;
- word = String::new();
- } else if ch == '\n' {
- break;
- }
- offset += ch.len_utf8();
- }
- if let Some(begin) = begin {
- let end = end.unwrap_or(offset);
- let word_lower = word.to_lowercase();
- if BOOLEAN_PAIRS
- .iter()
- .any(|(a, b)| word_lower == *a || word_lower == *b)
- {
- return Some((begin.to_point(snapshot)..end.to_point(snapshot), word));
- }
- }
- None
+fn is_toggle_word(word: &str) -> bool {
+ let lower = word.to_lowercase();
+ BOOLEAN_PAIRS
+ .iter()
+ .any(|(a, b)| lower == *a || lower == *b)
}
-fn toggle_boolean(boolean: &str) -> String {
+fn increment_toggle_string(boolean: &str) -> String {
let lower = boolean.to_lowercase();
let target = BOOLEAN_PAIRS
@@ -802,7 +745,7 @@ mod test {
}
#[gpui::test]
- async fn test_toggle_boolean(cx: &mut gpui::TestAppContext) {
+ async fn test_increment_toggle(cx: &mut gpui::TestAppContext) {
let mut cx = VimTestContext::new(cx, true).await;
cx.set_state("let enabled = trΛue;", Mode::Normal);
@@ -860,6 +803,23 @@ mod test {
cx.assert_state("let enabled = ΛOff;", Mode::Normal);
}
+ #[gpui::test]
+ async fn test_increment_order(cx: &mut gpui::TestAppContext) {
+ let mut cx = VimTestContext::new(cx, true).await;
+
+ cx.set_state("aaΛa false 1 2 3", Mode::Normal);
+ cx.simulate_keystrokes("ctrl-a");
+ cx.assert_state("aaa truΛe 1 2 3", Mode::Normal);
+
+ cx.set_state("aaΛa 1 false 2 3", Mode::Normal);
+ cx.simulate_keystrokes("ctrl-a");
+ cx.assert_state("aaa Λ2 false 2 3", Mode::Normal);
+
+ cx.set_state("trueΛ 1 2 3", Mode::Normal);
+ cx.simulate_keystrokes("ctrl-a");
+ cx.assert_state("true Λ2 2 3", Mode::Normal);
+ }
+
#[gpui::test]
async fn test_increment_visual_partial_number(cx: &mut gpui::TestAppContext) {
let mut cx = NeovimBackedTestContext::new(cx).await;
@@ -4041,6 +4041,25 @@ impl NavHistory {
self.0.lock().mode = NavigationMode::Normal;
}
+ pub fn clear(&mut self, cx: &mut App) {
+ let mut state = self.0.lock();
+
+ if state.backward_stack.is_empty()
+ && state.forward_stack.is_empty()
+ && state.closed_stack.is_empty()
+ && state.paths_by_item.is_empty()
+ {
+ return;
+ }
+
+ state.mode = NavigationMode::Normal;
+ state.backward_stack.clear();
+ state.forward_stack.clear();
+ state.closed_stack.clear();
+ state.paths_by_item.clear();
+ state.did_update(cx);
+ }
+
pub fn pop(&mut self, mode: NavigationMode, cx: &mut App) -> Option<NavigationEntry> {
let mut state = self.0.lock();
let entry = match mode {
@@ -166,6 +166,7 @@ pub trait SearchableItem: Item + EventEmitter<SearchEvent> {
window: &mut Window,
cx: &mut Context<Self>,
) -> Option<usize>;
+ fn set_search_is_case_sensitive(&mut self, _: Option<bool>, _: &mut Context<Self>) {}
}
pub trait SearchableItemHandle: ItemHandle {
@@ -234,6 +235,8 @@ pub trait SearchableItemHandle: ItemHandle {
window: &mut Window,
cx: &mut App,
);
+
+ fn set_search_is_case_sensitive(&self, is_case_sensitive: Option<bool>, cx: &mut App);
}
impl<T: SearchableItem> SearchableItemHandle for Entity<T> {
@@ -390,6 +393,11 @@ impl<T: SearchableItem> SearchableItemHandle for Entity<T> {
this.toggle_filtered_search_ranges(enabled, window, cx)
});
}
+ fn set_search_is_case_sensitive(&self, enabled: Option<bool>, cx: &mut App) {
+ self.update(cx, |this, cx| {
+ this.set_search_is_case_sensitive(enabled, cx)
+ });
+ }
}
impl From<Box<dyn SearchableItemHandle>> for AnyView {
@@ -199,6 +199,8 @@ actions!(
AddFolderToProject,
/// Clears all notifications.
ClearAllNotifications,
+ /// Clears all navigation history, including forward/backward navigation, recently opened files, and recently closed tabs. **This action is irreversible**.
+ ClearNavigationHistory,
/// Closes the active dock.
CloseActiveDock,
/// Closes all docks.
@@ -1917,6 +1919,12 @@ impl Workspace {
.collect()
}
+ pub fn clear_navigation_history(&mut self, _window: &mut Window, cx: &mut Context<Workspace>) {
+ for pane in &self.panes {
+ pane.update(cx, |pane, cx| pane.nav_history_mut().clear(cx));
+ }
+ }
+
fn navigate_history(
&mut self,
pane: WeakEntity<Pane>,
@@ -5858,6 +5866,11 @@ impl Workspace {
workspace.clear_all_notifications(cx);
},
))
+ .on_action(cx.listener(
+ |workspace: &mut Workspace, _: &ClearNavigationHistory, window, cx| {
+ workspace.clear_navigation_history(window, cx);
+ },
+ ))
.on_action(cx.listener(
|workspace: &mut Workspace, _: &SuppressNotification, _, cx| {
if let Some((notification_id, _)) = workspace.notifications.pop() {
@@ -2352,8 +2352,8 @@ impl Snapshot {
self.entries_by_path.first()
}
- /// TODO: what's the difference between `root_dir` and `abs_path`?
- /// is there any? if so, document it.
+ /// Returns `None` for a single file worktree, or `Some(self.abs_path())` if
+ /// it is a directory.
pub fn root_dir(&self) -> Option<Arc<Path>> {
self.root_entry()
.filter(|entry| entry.is_dir())
@@ -15,7 +15,7 @@ use extension::ExtensionHostProxy;
use fs::{Fs, RealFs};
use futures::{StreamExt, channel::oneshot, future};
use git::GitHostingProviderRegistry;
-use gpui::{App, AppContext, Application, AsyncApp, Focusable as _, UpdateGlobal as _};
+use gpui::{App, AppContext, Application, AsyncApp, Focusable as _, QuitMode, UpdateGlobal as _};
use gpui_tokio::Tokio;
use language::LanguageRegistry;
@@ -87,31 +87,33 @@ fn files_not_created_on_launch(errors: HashMap<io::ErrorKind, Vec<&Path>>) {
.collect::<Vec<_>>().join("\n\n");
eprintln!("{message}: {error_details}");
- Application::new().run(move |cx| {
- if let Ok(window) = cx.open_window(gpui::WindowOptions::default(), |_, cx| {
- cx.new(|_| gpui::Empty)
- }) {
- window
- .update(cx, |_, window, cx| {
- let response = window.prompt(
- gpui::PromptLevel::Critical,
- message,
- Some(&error_details),
- &["Exit"],
- cx,
- );
-
- cx.spawn_in(window, async move |_, cx| {
- response.await?;
- cx.update(|_, cx| cx.quit())
+ Application::new()
+ .with_quit_mode(QuitMode::Explicit)
+ .run(move |cx| {
+ if let Ok(window) = cx.open_window(gpui::WindowOptions::default(), |_, cx| {
+ cx.new(|_| gpui::Empty)
+ }) {
+ window
+ .update(cx, |_, window, cx| {
+ let response = window.prompt(
+ gpui::PromptLevel::Critical,
+ message,
+ Some(&error_details),
+ &["Exit"],
+ cx,
+ );
+
+ cx.spawn_in(window, async move |_, cx| {
+ response.await?;
+ cx.update(|_, cx| cx.quit())
+ })
+ .detach_and_log_err(cx);
})
- .detach_and_log_err(cx);
- })
- .log_err();
- } else {
- fail_to_open_window(anyhow::anyhow!("{message}: {error_details}"), cx)
- }
- })
+ .log_err();
+ } else {
+ fail_to_open_window(anyhow::anyhow!("{message}: {error_details}"), cx)
+ }
+ })
}
fn fail_to_open_window_async(e: anyhow::Error, cx: &mut AsyncApp) {
@@ -537,7 +539,7 @@ pub fn main() {
});
AppState::set_global(Arc::downgrade(&app_state), cx);
- auto_update::init(client.http_client(), cx);
+ auto_update::init(client.clone(), cx);
dap_adapters::init(cx);
auto_update_ui::init(cx);
reliability::init(
@@ -274,16 +274,27 @@ pub fn init(cx: &mut App) {
}
fn bind_on_window_closed(cx: &mut App) -> Option<gpui::Subscription> {
- WorkspaceSettings::get_global(cx)
- .on_last_window_closed
- .is_quit_app()
- .then(|| {
- cx.on_window_closed(|cx| {
- if cx.windows().is_empty() {
- cx.quit();
- }
+ #[cfg(target_os = "macos")]
+ {
+ WorkspaceSettings::get_global(cx)
+ .on_last_window_closed
+ .is_quit_app()
+ .then(|| {
+ cx.on_window_closed(|cx| {
+ if cx.windows().is_empty() {
+ cx.quit();
+ }
+ })
})
- })
+ }
+ #[cfg(not(target_os = "macos"))]
+ {
+ Some(cx.on_window_closed(|cx| {
+ if cx.windows().is_empty() {
+ cx.quit();
+ }
+ }))
+ }
}
pub fn build_window_options(display_uuid: Option<Uuid>, cx: &mut App) -> WindowOptions {
@@ -652,7 +652,7 @@ impl Zeta {
.header(ZED_VERSION_HEADER_NAME, app_version.to_string())
.body(
serde_json::to_string(&AcceptEditPredictionBody {
- request_id: request_id.0,
+ request_id: request_id.0.to_string(),
})?
.into(),
)?)
@@ -735,6 +735,8 @@ impl Zeta {
return anyhow::Ok(None);
};
+ let request_id = Uuid::from_str(&request_id).context("failed to parse request id")?;
+
let edit_preview = edit_preview.await;
Ok(Some(EditPrediction {
@@ -2162,7 +2164,7 @@ mod tests {
.status(200)
.body(
serde_json::to_string(&PredictEditsResponse {
- request_id: Uuid::new_v4(),
+ request_id: Uuid::new_v4().to_string(),
output_excerpt: completion_response.lock().clone(),
})
.unwrap()
@@ -11,6 +11,9 @@ workspace = true
[lib]
path = "src/zeta2.rs"
+[features]
+llm-response-cache = []
+
[dependencies]
anyhow.workspace = true
arrayvec.workspace = true
@@ -1,21 +1,14 @@
use std::{ops::Range, sync::Arc};
-use gpui::{AsyncApp, Entity};
+use gpui::{AsyncApp, Entity, SharedString};
use language::{Anchor, Buffer, BufferSnapshot, EditPreview, OffsetRangeExt, TextBufferSnapshot};
-use uuid::Uuid;
-#[derive(Copy, Clone, Default, Debug, PartialEq, Eq, Hash)]
-pub struct EditPredictionId(pub Uuid);
-
-impl Into<Uuid> for EditPredictionId {
- fn into(self) -> Uuid {
- self.0
- }
-}
+#[derive(Clone, Default, Debug, PartialEq, Eq, Hash)]
+pub struct EditPredictionId(pub SharedString);
impl From<EditPredictionId> for gpui::ElementId {
fn from(value: EditPredictionId) -> Self {
- gpui::ElementId::Uuid(value.0)
+ gpui::ElementId::Name(value.0)
}
}
@@ -149,7 +142,7 @@ mod tests {
.await;
let prediction = EditPrediction {
- id: EditPredictionId(Uuid::new_v4()),
+ id: EditPredictionId("prediction-1".into()),
edits,
snapshot: cx.read(|cx| buffer.read(cx).snapshot()),
buffer: buffer.clone(),
@@ -0,0 +1,197 @@
+use anyhow::{Context as _, Result, anyhow};
+use language::{Anchor, BufferSnapshot, OffsetRangeExt as _, TextBufferSnapshot};
+use std::ops::Range;
+use std::path::Path;
+use std::sync::Arc;
+
+pub async fn parse_xml_edits<'a>(
+ mut input: &'a str,
+ get_buffer: impl Fn(&Path) -> Option<(&'a BufferSnapshot, &'a [Range<Anchor>])> + Send,
+) -> Result<(&'a BufferSnapshot, Vec<(Range<Anchor>, Arc<str>)>)> {
+ let edits_tag = parse_tag(&mut input, "edits")?.context("No edits tag")?;
+
+ input = edits_tag.body;
+
+ let file_path = edits_tag
+ .attributes
+ .trim_start()
+ .strip_prefix("path")
+ .context("no file attribute on edits tag")?
+ .trim_end()
+ .strip_prefix('=')
+ .context("no value for path attribute")?
+ .trim()
+ .trim_start_matches('"')
+ .trim_end_matches('"');
+
+ let (buffer, context_ranges) = get_buffer(file_path.as_ref())
+ .with_context(|| format!("no buffer for file {file_path}"))?;
+
+ let mut edits = vec![];
+ while let Some(old_text_tag) = parse_tag(&mut input, "old_text")? {
+ let new_text_tag =
+ parse_tag(&mut input, "new_text")?.context("no new_text tag following old_text")?;
+ edits.extend(resolve_new_text_old_text_in_buffer(
+ new_text_tag.body,
+ old_text_tag.body,
+ buffer,
+ context_ranges,
+ )?);
+ }
+
+ Ok((buffer, edits))
+}
+
+fn resolve_new_text_old_text_in_buffer(
+ new_text: &str,
+ old_text: &str,
+ buffer: &TextBufferSnapshot,
+ ranges: &[Range<Anchor>],
+) -> Result<impl Iterator<Item = (Range<Anchor>, Arc<str>)>, anyhow::Error> {
+ let context_offset = if old_text.is_empty() {
+ Ok(0)
+ } else {
+ let mut offset = None;
+ for range in ranges {
+ let range = range.to_offset(buffer);
+ let text = buffer.text_for_range(range.clone()).collect::<String>();
+ for (match_offset, _) in text.match_indices(old_text) {
+ if offset.is_some() {
+ anyhow::bail!("old_text is not unique enough:\n{}", old_text);
+ }
+ offset = Some(range.start + match_offset);
+ }
+ }
+ offset.ok_or_else(|| anyhow!("Failed to match old_text:\n{}", old_text))
+ }?;
+
+ let edits_within_hunk = language::text_diff(&old_text, &new_text);
+ Ok(edits_within_hunk
+ .into_iter()
+ .map(move |(inner_range, inner_text)| {
+ (
+ buffer.anchor_after(context_offset + inner_range.start)
+ ..buffer.anchor_before(context_offset + inner_range.end),
+ inner_text,
+ )
+ }))
+}
+
+struct ParsedTag<'a> {
+ attributes: &'a str,
+ body: &'a str,
+}
+
+fn parse_tag<'a>(input: &mut &'a str, tag: &str) -> Result<Option<ParsedTag<'a>>> {
+ let open_tag = format!("<{}", tag);
+ let close_tag = format!("</{}>", tag);
+ let Some(start_ix) = input.find(&open_tag) else {
+ return Ok(None);
+ };
+ let start_ix = start_ix + open_tag.len();
+ let closing_bracket_ix = start_ix
+ + input[start_ix..]
+ .find('>')
+ .with_context(|| format!("missing > after {tag}"))?;
+ let attributes = &input[start_ix..closing_bracket_ix].trim();
+ let end_ix = closing_bracket_ix
+ + input[closing_bracket_ix..]
+ .find(&close_tag)
+ .with_context(|| format!("no `{close_tag}` tag"))?;
+ let body = &input[closing_bracket_ix + '>'.len_utf8()..end_ix];
+ let body = body.strip_prefix('\n').unwrap_or(body);
+ *input = &input[end_ix + close_tag.len()..];
+ Ok(Some(ParsedTag { attributes, body }))
+}
+
+#[cfg(test)]
+mod tests {
+ use super::*;
+ use gpui::TestAppContext;
+ use indoc::indoc;
+ use language::Point;
+ use project::{FakeFs, Project};
+ use serde_json::json;
+ use settings::SettingsStore;
+ use util::path;
+
+ #[test]
+ fn test_parse_tags() {
+ let mut input = indoc! {r#"
+ Prelude
+ <tag attr="foo">
+ tag value
+ </tag>
+ "# };
+ let parsed = parse_tag(&mut input, "tag").unwrap().unwrap();
+ assert_eq!(parsed.attributes, "attr=\"foo\"");
+ assert_eq!(parsed.body, "tag value\n");
+ assert_eq!(input, "\n");
+ }
+
+ #[gpui::test]
+ async fn test_parse_xml_edits(cx: &mut TestAppContext) {
+ let fs = init_test(cx);
+
+ let buffer_1_text = indoc! {r#"
+ one two three four
+ five six seven eight
+ nine ten eleven twelve
+ "# };
+
+ fs.insert_tree(
+ path!("/root"),
+ json!({
+ "file1": buffer_1_text,
+ }),
+ )
+ .await;
+
+ let project = Project::test(fs, [path!("/root").as_ref()], cx).await;
+ let buffer = project
+ .update(cx, |project, cx| {
+ project.open_local_buffer(path!("/root/file1"), cx)
+ })
+ .await
+ .unwrap();
+ let buffer_snapshot = buffer.read_with(cx, |buffer, _| buffer.snapshot());
+
+ let edits = indoc! {r#"
+ <edits path="root/file1">
+ <old_text>
+ five six seven eight
+ </old_text>
+ <new_text>
+ five SIX seven eight!
+ </new_text>
+ </edits>
+ "#};
+
+ let (buffer, edits) = parse_xml_edits(edits, |_path| {
+ Some((&buffer_snapshot, &[(Anchor::MIN..Anchor::MAX)] as &[_]))
+ })
+ .await
+ .unwrap();
+
+ let edits = edits
+ .into_iter()
+ .map(|(range, text)| (range.to_point(&buffer), text))
+ .collect::<Vec<_>>();
+ assert_eq!(
+ edits,
+ &[
+ (Point::new(1, 5)..Point::new(1, 8), "SIX".into()),
+ (Point::new(1, 20)..Point::new(1, 20), "!".into())
+ ]
+ );
+ }
+
+ fn init_test(cx: &mut TestAppContext) -> Arc<FakeFs> {
+ cx.update(|cx| {
+ let settings_store = SettingsStore::test(cx);
+ cx.set_global(settings_store);
+ });
+
+ FakeFs::new(cx.background_executor.clone())
+ }
+}
@@ -1,4 +1,4 @@
-use anyhow::{Context as _, Result, anyhow};
+use anyhow::{Context as _, Result, anyhow, bail};
use chrono::TimeDelta;
use client::{Client, EditPredictionUsage, UserStore};
use cloud_llm_client::predict_edits_v3::{self, PromptFormat, Signature};
@@ -6,8 +6,8 @@ use cloud_llm_client::{
AcceptEditPredictionBody, EXPIRED_LLM_TOKEN_HEADER_NAME, MINIMUM_REQUIRED_VERSION_HEADER_NAME,
ZED_VERSION_HEADER_NAME,
};
-use cloud_zeta2_prompt::DEFAULT_MAX_PROMPT_BYTES;
use cloud_zeta2_prompt::retrieval_prompt::{SearchToolInput, SearchToolQuery};
+use cloud_zeta2_prompt::{CURSOR_MARKER, DEFAULT_MAX_PROMPT_BYTES};
use collections::HashMap;
use edit_prediction_context::{
DeclarationId, DeclarationStyle, EditPredictionContext, EditPredictionContextOptions,
@@ -30,8 +30,8 @@ use project::Project;
use release_channel::AppVersion;
use serde::de::DeserializeOwned;
use std::collections::{VecDeque, hash_map};
-use uuid::Uuid;
+use std::env;
use std::ops::Range;
use std::path::Path;
use std::str::FromStr as _;
@@ -47,6 +47,7 @@ mod prediction;
mod provider;
pub mod retrieval_search;
pub mod udiff;
+mod xml_edits;
use crate::merge_excerpts::merge_excerpts;
use crate::prediction::EditPrediction;
@@ -88,8 +89,24 @@ pub const DEFAULT_OPTIONS: ZetaOptions = ZetaOptions {
buffer_change_grouping_interval: Duration::from_secs(1),
};
-static MODEL_ID: LazyLock<String> =
- LazyLock::new(|| std::env::var("ZED_ZETA2_MODEL").unwrap_or("yqvev8r3".to_string()));
+static USE_OLLAMA: LazyLock<bool> =
+ LazyLock::new(|| env::var("ZED_ZETA2_OLLAMA").is_ok_and(|var| !var.is_empty()));
+static MODEL_ID: LazyLock<String> = LazyLock::new(|| {
+ env::var("ZED_ZETA2_MODEL").unwrap_or(if *USE_OLLAMA {
+ "qwen3-coder:30b".to_string()
+ } else {
+ "yqvev8r3".to_string()
+ })
+});
+static PREDICT_EDITS_URL: LazyLock<Option<String>> = LazyLock::new(|| {
+ env::var("ZED_PREDICT_EDITS_URL").ok().or_else(|| {
+ if *USE_OLLAMA {
+ Some("http://localhost:11434/v1/chat/completions".into())
+ } else {
+ None
+ }
+ })
+});
pub struct Zeta2FeatureFlag;
@@ -115,6 +132,15 @@ pub struct Zeta {
options: ZetaOptions,
update_required: bool,
debug_tx: Option<mpsc::UnboundedSender<ZetaDebugInfo>>,
+ #[cfg(feature = "llm-response-cache")]
+ llm_response_cache: Option<Arc<dyn LlmResponseCache>>,
+}
+
+#[cfg(feature = "llm-response-cache")]
+pub trait LlmResponseCache: Send + Sync {
+ fn get_key(&self, url: &gpui::http_client::Url, body: &str) -> u64;
+ fn read_response(&self, key: u64) -> Option<String>;
+ fn write_response(&self, key: u64, value: &str);
}
#[derive(Debug, Clone, PartialEq)]
@@ -343,9 +369,16 @@ impl Zeta {
),
update_required: false,
debug_tx: None,
+ #[cfg(feature = "llm-response-cache")]
+ llm_response_cache: None,
}
}
+ #[cfg(feature = "llm-response-cache")]
+ pub fn with_llm_response_cache(&mut self, cache: Arc<dyn LlmResponseCache>) {
+ self.llm_response_cache = Some(cache);
+ }
+
pub fn debug_info(&mut self) -> mpsc::UnboundedReceiver<ZetaDebugInfo> {
let (debug_watch_tx, debug_watch_rx) = mpsc::unbounded();
self.debug_tx = Some(debug_watch_tx);
@@ -567,13 +600,13 @@ impl Zeta {
let Some(prediction) = project_state.current_prediction.take() else {
return;
};
- let request_id = prediction.prediction.id.into();
+ let request_id = prediction.prediction.id.to_string();
let client = self.client.clone();
let llm_token = self.llm_token.clone();
let app_version = AppVersion::global(cx);
cx.spawn(async move |this, cx| {
- let url = if let Ok(predict_edits_url) = std::env::var("ZED_ACCEPT_PREDICTION_URL") {
+ let url = if let Ok(predict_edits_url) = env::var("ZED_ACCEPT_PREDICTION_URL") {
http_client::Url::parse(&predict_edits_url)?
} else {
client
@@ -585,7 +618,10 @@ impl Zeta {
.background_spawn(Self::send_api_request::<()>(
move |builder| {
let req = builder.uri(url.as_ref()).body(
- serde_json::to_string(&AcceptEditPredictionBody { request_id })?.into(),
+ serde_json::to_string(&AcceptEditPredictionBody {
+ request_id: request_id.clone(),
+ })?
+ .into(),
);
Ok(req?)
},
@@ -715,6 +751,9 @@ impl Zeta {
})
.collect::<Vec<_>>();
+ #[cfg(feature = "llm-response-cache")]
+ let llm_response_cache = self.llm_response_cache.clone();
+
let request_task = cx.background_spawn({
let active_buffer = active_buffer.clone();
async move {
@@ -875,7 +914,7 @@ impl Zeta {
None
};
- if cfg!(debug_assertions) && std::env::var("ZED_ZETA2_SKIP_REQUEST").is_ok() {
+ if cfg!(debug_assertions) && env::var("ZED_ZETA2_SKIP_REQUEST").is_ok() {
if let Some(debug_response_tx) = debug_response_tx {
debug_response_tx
.send((Err("Request skipped".to_string()), TimeDelta::zero()))
@@ -904,8 +943,15 @@ impl Zeta {
log::trace!("Sending edit prediction request");
let before_request = chrono::Utc::now();
- let response =
- Self::send_raw_llm_request(client, llm_token, app_version, request).await;
+ let response = Self::send_raw_llm_request(
+ request,
+ client,
+ llm_token,
+ app_version,
+ #[cfg(feature = "llm-response-cache")]
+ llm_response_cache,
+ )
+ .await;
let request_time = chrono::Utc::now() - before_request;
log::trace!("Got edit prediction response");
@@ -923,24 +969,40 @@ impl Zeta {
}
let (res, usage) = response?;
- let request_id = EditPredictionId(Uuid::from_str(&res.id)?);
- let Some(output_text) = text_from_response(res) else {
- return Ok((None, usage))
+ let request_id = EditPredictionId(res.id.clone().into());
+ let Some(mut output_text) = text_from_response(res) else {
+ return Ok((None, usage));
};
- let (edited_buffer_snapshot, edits) =
- crate::udiff::parse_diff(&output_text, |path| {
- included_files
- .iter()
- .find_map(|(_, buffer, probe_path, ranges)| {
- if probe_path.as_ref() == path {
- Some((buffer, ranges.as_slice()))
- } else {
- None
- }
- })
- })
- .await?;
+ if output_text.contains(CURSOR_MARKER) {
+ log::trace!("Stripping out {CURSOR_MARKER} from response");
+ output_text = output_text.replace(CURSOR_MARKER, "");
+ }
+
+ let get_buffer_from_context = |path: &Path| {
+ included_files
+ .iter()
+ .find_map(|(_, buffer, probe_path, ranges)| {
+ if probe_path.as_ref() == path {
+ Some((buffer, ranges.as_slice()))
+ } else {
+ None
+ }
+ })
+ };
+
+ let (edited_buffer_snapshot, edits) = match options.prompt_format {
+ PromptFormat::NumLinesUniDiff => {
+ crate::udiff::parse_diff(&output_text, get_buffer_from_context).await?
+ }
+ PromptFormat::OldTextNewText => {
+ crate::xml_edits::parse_xml_edits(&output_text, get_buffer_from_context)
+ .await?
+ }
+ _ => {
+ bail!("unsupported prompt format {}", options.prompt_format)
+ }
+ };
let edited_buffer = included_files
.iter()
@@ -951,9 +1013,17 @@ impl Zeta {
None
}
})
- .context("Failed to find buffer in included_buffers, even though we just found the snapshot")?;
-
- anyhow::Ok((Some((request_id, edited_buffer, edited_buffer_snapshot.clone(), edits)), usage))
+ .context("Failed to find buffer in included_buffers")?;
+
+ anyhow::Ok((
+ Some((
+ request_id,
+ edited_buffer,
+ edited_buffer_snapshot.clone(),
+ edits,
+ )),
+ usage,
+ ))
}
});
@@ -975,12 +1045,15 @@ impl Zeta {
}
async fn send_raw_llm_request(
+ request: open_ai::Request,
client: Arc<Client>,
llm_token: LlmApiToken,
app_version: SemanticVersion,
- request: open_ai::Request,
+ #[cfg(feature = "llm-response-cache")] llm_response_cache: Option<
+ Arc<dyn LlmResponseCache>,
+ >,
) -> Result<(open_ai::Response, Option<EditPredictionUsage>)> {
- let url = if let Ok(predict_edits_url) = std::env::var("ZED_PREDICT_EDITS_URL") {
+ let url = if let Some(predict_edits_url) = PREDICT_EDITS_URL.as_ref() {
http_client::Url::parse(&predict_edits_url)?
} else {
client
@@ -988,7 +1061,21 @@ impl Zeta {
.build_zed_llm_url("/predict_edits/raw", &[])?
};
- Self::send_api_request(
+ #[cfg(feature = "llm-response-cache")]
+ let cache_key = if let Some(cache) = llm_response_cache {
+ let request_json = serde_json::to_string(&request)?;
+ let key = cache.get_key(&url, &request_json);
+
+ if let Some(response_str) = cache.read_response(key) {
+ return Ok((serde_json::from_str(&response_str)?, None));
+ }
+
+ Some((cache, key))
+ } else {
+ None
+ };
+
+ let (response, usage) = Self::send_api_request(
|builder| {
let req = builder
.uri(url.as_ref())
@@ -999,7 +1086,14 @@ impl Zeta {
llm_token,
app_version,
)
- .await
+ .await?;
+
+ #[cfg(feature = "llm-response-cache")]
+ if let Some((cache, key)) = cache_key {
+ cache.write_response(key, &serde_json::to_string(&response)?);
+ }
+
+ Ok((response, usage))
}
fn handle_api_response<T>(
@@ -1267,10 +1361,20 @@ impl Zeta {
reasoning_effort: None,
};
+ #[cfg(feature = "llm-response-cache")]
+ let llm_response_cache = self.llm_response_cache.clone();
+
cx.spawn(async move |this, cx| {
log::trace!("Sending search planning request");
- let response =
- Self::send_raw_llm_request(client, llm_token, app_version, request).await;
+ let response = Self::send_raw_llm_request(
+ request,
+ client,
+ llm_token,
+ app_version,
+ #[cfg(feature = "llm-response-cache")]
+ llm_response_cache,
+ )
+ .await;
let mut response = Self::handle_api_response(&this, response, cx)?;
log::trace!("Got search planning response");
@@ -1298,7 +1402,8 @@ impl Zeta {
continue;
}
- let input: SearchToolInput = serde_json::from_str(&function.arguments)?;
+ let input: SearchToolInput = serde_json::from_str(&function.arguments)
+ .with_context(|| format!("invalid search json {}", &function.arguments))?;
queries.extend(input.queries);
}
@@ -1358,6 +1463,16 @@ impl Zeta {
})
}
+ pub fn set_context(
+ &mut self,
+ project: Entity<Project>,
+ context: HashMap<Entity<Buffer>, Vec<Range<Anchor>>>,
+ ) {
+ if let Some(zeta_project) = self.projects.get_mut(&project.entity_id()) {
+ zeta_project.context = Some(context);
+ }
+ }
+
fn gather_nearby_diagnostics(
cursor_offset: usize,
diagnostic_sets: &[(LanguageServerId, DiagnosticSet)],
@@ -54,7 +54,7 @@ toml.workspace = true
util.workspace = true
watch.workspace = true
zeta.workspace = true
-zeta2.workspace = true
+zeta2 = { workspace = true, features = ["llm-response-cache"] }
zlog.workspace = true
[dev-dependencies]
@@ -1,5 +1,4 @@
use std::{
- fs,
io::IsTerminal,
path::{Path, PathBuf},
sync::Arc,
@@ -12,9 +11,9 @@ use gpui::AsyncApp;
use zeta2::udiff::DiffLine;
use crate::{
+ PromptFormat,
example::{Example, NamedExample},
headless::ZetaCliAppState,
- paths::CACHE_DIR,
predict::{PredictionDetails, zeta2_predict},
};
@@ -22,7 +21,11 @@ use crate::{
pub struct EvaluateArguments {
example_paths: Vec<PathBuf>,
#[clap(long)]
- re_run: bool,
+ skip_cache: bool,
+ #[arg(long, value_enum, default_value_t = PromptFormat::default())]
+ prompt_format: PromptFormat,
+ #[arg(long)]
+ use_expected_context: bool,
}
pub async fn run_evaluate(
@@ -33,7 +36,17 @@ pub async fn run_evaluate(
let example_len = args.example_paths.len();
let all_tasks = args.example_paths.into_iter().map(|path| {
let app_state = app_state.clone();
- cx.spawn(async move |cx| run_evaluate_one(&path, args.re_run, app_state.clone(), cx).await)
+ cx.spawn(async move |cx| {
+ run_evaluate_one(
+ &path,
+ args.skip_cache,
+ args.prompt_format,
+ args.use_expected_context,
+ app_state.clone(),
+ cx,
+ )
+ .await
+ })
});
let all_results = futures::future::try_join_all(all_tasks).await.unwrap();
@@ -51,35 +64,23 @@ pub async fn run_evaluate(
pub async fn run_evaluate_one(
example_path: &Path,
- re_run: bool,
+ skip_cache: bool,
+ prompt_format: PromptFormat,
+ use_expected_context: bool,
app_state: Arc<ZetaCliAppState>,
cx: &mut AsyncApp,
) -> Result<EvaluationResult> {
let example = NamedExample::load(&example_path).unwrap();
- let example_cache_path = CACHE_DIR.join(&example_path.file_name().unwrap());
-
- let predictions = if !re_run && example_cache_path.exists() {
- let file_contents = fs::read_to_string(&example_cache_path)?;
- let as_json = serde_json::from_str::<PredictionDetails>(&file_contents)?;
- log::debug!(
- "Loaded predictions from cache: {}",
- example_cache_path.display()
- );
- as_json
- } else {
- zeta2_predict(example.clone(), &app_state, cx)
- .await
- .unwrap()
- };
-
- if !example_cache_path.exists() {
- fs::create_dir_all(&*CACHE_DIR).unwrap();
- fs::write(
- example_cache_path,
- serde_json::to_string(&predictions).unwrap(),
- )
- .unwrap();
- }
+ let predictions = zeta2_predict(
+ example.clone(),
+ skip_cache,
+ prompt_format,
+ use_expected_context,
+ &app_state,
+ cx,
+ )
+ .await
+ .unwrap();
let evaluation_result = evaluate(&example.example, &predictions);
@@ -158,19 +158,20 @@ fn syntax_args_to_options(
}),
max_diagnostic_bytes: zeta2_args.max_diagnostic_bytes,
max_prompt_bytes: zeta2_args.max_prompt_bytes,
- prompt_format: zeta2_args.prompt_format.clone().into(),
+ prompt_format: zeta2_args.prompt_format.into(),
file_indexing_parallelism: zeta2_args.file_indexing_parallelism,
buffer_change_grouping_interval: Duration::ZERO,
}
}
-#[derive(clap::ValueEnum, Default, Debug, Clone)]
+#[derive(clap::ValueEnum, Default, Debug, Clone, Copy)]
enum PromptFormat {
MarkedExcerpt,
LabeledSections,
OnlySnippets,
#[default]
NumberedLines,
+ OldTextNewText,
}
impl Into<predict_edits_v3::PromptFormat> for PromptFormat {
@@ -180,6 +181,7 @@ impl Into<predict_edits_v3::PromptFormat> for PromptFormat {
Self::LabeledSections => predict_edits_v3::PromptFormat::LabeledSections,
Self::OnlySnippets => predict_edits_v3::PromptFormat::OnlySnippets,
Self::NumberedLines => predict_edits_v3::PromptFormat::NumLinesUniDiff,
+ Self::OldTextNewText => predict_edits_v3::PromptFormat::OldTextNewText,
}
}
}
@@ -2,7 +2,15 @@ use std::{env, path::PathBuf, sync::LazyLock};
static TARGET_DIR: LazyLock<PathBuf> = LazyLock::new(|| env::current_dir().unwrap().join("target"));
pub static CACHE_DIR: LazyLock<PathBuf> =
- LazyLock::new(|| TARGET_DIR.join("zeta-prediction-cache"));
+ LazyLock::new(|| TARGET_DIR.join("zeta-llm-response-cache"));
pub static REPOS_DIR: LazyLock<PathBuf> = LazyLock::new(|| TARGET_DIR.join("zeta-repos"));
pub static WORKTREES_DIR: LazyLock<PathBuf> = LazyLock::new(|| TARGET_DIR.join("zeta-worktrees"));
pub static LOGS_DIR: LazyLock<PathBuf> = LazyLock::new(|| TARGET_DIR.join("zeta-logs"));
+pub static LOGS_SEARCH_PROMPT: LazyLock<PathBuf> =
+ LazyLock::new(|| LOGS_DIR.join("search_prompt.md"));
+pub static LOGS_SEARCH_QUERIES: LazyLock<PathBuf> =
+ LazyLock::new(|| LOGS_DIR.join("search_queries.json"));
+pub static LOGS_PREDICTION_PROMPT: LazyLock<PathBuf> =
+ LazyLock::new(|| LOGS_DIR.join("prediction_prompt.md"));
+pub static LOGS_PREDICTION_RESPONSE: LazyLock<PathBuf> =
+ LazyLock::new(|| LOGS_DIR.join("prediction_response.md"));
@@ -1,27 +1,43 @@
-use crate::example::{ActualExcerpt, NamedExample};
+use crate::PromptFormat;
+use crate::example::{ActualExcerpt, ExpectedExcerpt, NamedExample};
use crate::headless::ZetaCliAppState;
-use crate::paths::LOGS_DIR;
+use crate::paths::{
+ CACHE_DIR, LOGS_DIR, LOGS_PREDICTION_PROMPT, LOGS_PREDICTION_RESPONSE, LOGS_SEARCH_PROMPT,
+ LOGS_SEARCH_QUERIES,
+};
use ::serde::Serialize;
use anyhow::{Result, anyhow};
use clap::Args;
+use collections::HashMap;
+use gpui::http_client::Url;
+use language::{Anchor, Buffer, Point};
+// use cloud_llm_client::predict_edits_v3::PromptFormat;
use cloud_zeta2_prompt::{CURSOR_MARKER, write_codeblock};
use futures::StreamExt as _;
-use gpui::{AppContext, AsyncApp};
+use gpui::{AppContext, AsyncApp, Entity};
use project::Project;
use serde::Deserialize;
use std::cell::Cell;
use std::fs;
use std::io::Write;
+use std::ops::Range;
use std::path::PathBuf;
use std::sync::Arc;
use std::sync::Mutex;
use std::time::{Duration, Instant};
+use zeta2::LlmResponseCache;
#[derive(Debug, Args)]
pub struct PredictArguments {
- example_path: PathBuf,
+ #[arg(long, value_enum, default_value_t = PromptFormat::default())]
+ prompt_format: PromptFormat,
+ #[arg(long)]
+ use_expected_context: bool,
#[clap(long, short, value_enum, default_value_t = PredictionsOutputFormat::Md)]
format: PredictionsOutputFormat,
+ example_path: PathBuf,
+ #[clap(long)]
+ skip_cache: bool,
}
#[derive(clap::ValueEnum, Debug, Clone)]
@@ -30,14 +46,33 @@ pub enum PredictionsOutputFormat {
Md,
Diff,
}
+
pub async fn run_zeta2_predict(
args: PredictArguments,
app_state: &Arc<ZetaCliAppState>,
cx: &mut AsyncApp,
) {
let example = NamedExample::load(args.example_path).unwrap();
- let result = zeta2_predict(example, &app_state, cx).await.unwrap();
+ let result = zeta2_predict(
+ example,
+ args.skip_cache,
+ args.prompt_format,
+ args.use_expected_context,
+ &app_state,
+ cx,
+ )
+ .await
+ .unwrap();
result.write(args.format, std::io::stdout()).unwrap();
+
+ println!("## Logs\n");
+ println!("Search prompt: {}", LOGS_SEARCH_PROMPT.display());
+ println!("Search queries: {}", LOGS_SEARCH_QUERIES.display());
+ println!("Prediction prompt: {}", LOGS_PREDICTION_PROMPT.display());
+ println!(
+ "Prediction response: {}",
+ LOGS_PREDICTION_RESPONSE.display()
+ );
}
thread_local! {
@@ -46,6 +81,9 @@ thread_local! {
pub async fn zeta2_predict(
example: NamedExample,
+ skip_cache: bool,
+ prompt_format: PromptFormat,
+ use_expected_context: bool,
app_state: &Arc<ZetaCliAppState>,
cx: &mut AsyncApp,
) -> Result<PredictionDetails> {
@@ -88,6 +126,10 @@ pub async fn zeta2_predict(
let zeta = cx.update(|cx| zeta2::Zeta::global(&app_state.client, &app_state.user_store, cx))?;
+ zeta.update(cx, |zeta, _cx| {
+ zeta.with_llm_response_cache(Arc::new(Cache { skip_cache }));
+ })?;
+
cx.subscribe(&buffer_store, {
let project = project.clone();
move |_, event, cx| match event {
@@ -110,33 +152,31 @@ pub async fn zeta2_predict(
let debug_task = cx.background_spawn({
let result = result.clone();
async move {
- let mut context_retrieval_started_at = None;
- let mut context_retrieval_finished_at = None;
+ let mut start_time = None;
let mut search_queries_generated_at = None;
let mut search_queries_executed_at = None;
while let Some(event) = debug_rx.next().await {
match event {
zeta2::ZetaDebugInfo::ContextRetrievalStarted(info) => {
- context_retrieval_started_at = Some(info.timestamp);
- fs::write(LOGS_DIR.join("search_prompt.md"), &info.search_prompt)?;
+ start_time = Some(info.timestamp);
+ fs::write(&*LOGS_SEARCH_PROMPT, &info.search_prompt)?;
}
zeta2::ZetaDebugInfo::SearchQueriesGenerated(info) => {
search_queries_generated_at = Some(info.timestamp);
fs::write(
- LOGS_DIR.join("search_queries.json"),
+ &*LOGS_SEARCH_QUERIES,
serde_json::to_string_pretty(&info.search_queries).unwrap(),
)?;
}
zeta2::ZetaDebugInfo::SearchQueriesExecuted(info) => {
search_queries_executed_at = Some(info.timestamp);
}
- zeta2::ZetaDebugInfo::ContextRetrievalFinished(info) => {
- context_retrieval_finished_at = Some(info.timestamp);
- }
+ zeta2::ZetaDebugInfo::ContextRetrievalFinished(_info) => {}
zeta2::ZetaDebugInfo::EditPredictionRequested(request) => {
let prediction_started_at = Instant::now();
+ start_time.get_or_insert(prediction_started_at);
fs::write(
- LOGS_DIR.join("prediction_prompt.md"),
+ &*LOGS_PREDICTION_PROMPT,
&request.local_prompt.unwrap_or_default(),
)?;
@@ -170,19 +210,20 @@ pub async fn zeta2_predict(
let response = request.response_rx.await?.0.map_err(|err| anyhow!(err))?;
let response = zeta2::text_from_response(response).unwrap_or_default();
let prediction_finished_at = Instant::now();
- fs::write(LOGS_DIR.join("prediction_response.md"), &response)?;
+ fs::write(&*LOGS_PREDICTION_RESPONSE, &response)?;
let mut result = result.lock().unwrap();
- result.planning_search_time = search_queries_generated_at.unwrap()
- - context_retrieval_started_at.unwrap();
- result.running_search_time = search_queries_executed_at.unwrap()
- - search_queries_generated_at.unwrap();
- result.filtering_search_time = context_retrieval_finished_at.unwrap()
- - search_queries_executed_at.unwrap();
+ if !use_expected_context {
+ result.planning_search_time =
+ Some(search_queries_generated_at.unwrap() - start_time.unwrap());
+ result.running_search_time = Some(
+ search_queries_executed_at.unwrap()
+ - search_queries_generated_at.unwrap(),
+ );
+ }
result.prediction_time = prediction_finished_at - prediction_started_at;
- result.total_time =
- prediction_finished_at - context_retrieval_started_at.unwrap();
+ result.total_time = prediction_finished_at - start_time.unwrap();
break;
}
@@ -192,10 +233,42 @@ pub async fn zeta2_predict(
}
});
- zeta.update(cx, |zeta, cx| {
- zeta.refresh_context(project.clone(), cursor_buffer.clone(), cursor_anchor, cx)
- })?
- .await?;
+ zeta.update(cx, |zeta, _cx| {
+ let mut options = zeta.options().clone();
+ options.prompt_format = prompt_format.into();
+ zeta.set_options(options);
+ })?;
+
+ if use_expected_context {
+ let context_excerpts_tasks = example
+ .example
+ .expected_context
+ .iter()
+ .flat_map(|section| {
+ section.alternatives[0].excerpts.iter().map(|excerpt| {
+ resolve_context_entry(project.clone(), excerpt.clone(), cx.clone())
+ })
+ })
+ .collect::<Vec<_>>();
+ let context_excerpts_vec = futures::future::try_join_all(context_excerpts_tasks).await?;
+
+ let mut context_excerpts = HashMap::default();
+ for (buffer, mut excerpts) in context_excerpts_vec {
+ context_excerpts
+ .entry(buffer)
+ .or_insert(Vec::new())
+ .append(&mut excerpts);
+ }
+
+ zeta.update(cx, |zeta, _cx| {
+ zeta.set_context(project.clone(), context_excerpts)
+ })?;
+ } else {
+ zeta.update(cx, |zeta, cx| {
+ zeta.refresh_context(project.clone(), cursor_buffer.clone(), cursor_anchor, cx)
+ })?
+ .await?;
+ }
let prediction = zeta
.update(cx, |zeta, cx| {
@@ -223,14 +296,90 @@ pub async fn zeta2_predict(
anyhow::Ok(result)
}
+async fn resolve_context_entry(
+ project: Entity<Project>,
+ excerpt: ExpectedExcerpt,
+ mut cx: AsyncApp,
+) -> Result<(Entity<Buffer>, Vec<Range<Anchor>>)> {
+ let buffer = project
+ .update(&mut cx, |project, cx| {
+ let project_path = project.find_project_path(&excerpt.path, cx).unwrap();
+ project.open_buffer(project_path, cx)
+ })?
+ .await?;
+
+ let ranges = buffer.read_with(&mut cx, |buffer, _| {
+ let full_text = buffer.text();
+ let offset = full_text
+ .find(&excerpt.text)
+ .expect("Expected context not found");
+ let point = buffer.offset_to_point(offset);
+ excerpt
+ .required_lines
+ .iter()
+ .map(|line| {
+ let row = point.row + line.0;
+ let range = Point::new(row, 0)..Point::new(row + 1, 0);
+ buffer.anchor_after(range.start)..buffer.anchor_before(range.end)
+ })
+ .collect()
+ })?;
+
+ Ok((buffer, ranges))
+}
+
+struct Cache {
+ skip_cache: bool,
+}
+
+impl Cache {
+ fn path(key: u64) -> PathBuf {
+ CACHE_DIR.join(format!("{key:x}.json"))
+ }
+}
+
+impl LlmResponseCache for Cache {
+ fn get_key(&self, url: &Url, body: &str) -> u64 {
+ use collections::FxHasher;
+ use std::hash::{Hash, Hasher};
+
+ let mut hasher = FxHasher::default();
+ url.hash(&mut hasher);
+ body.hash(&mut hasher);
+ hasher.finish()
+ }
+
+ fn read_response(&self, key: u64) -> Option<String> {
+ let path = Cache::path(key);
+ if path.exists() {
+ if self.skip_cache {
+ log::info!("Skipping existing cached LLM response: {}", path.display());
+ None
+ } else {
+ log::info!("Using LLM response from cache: {}", path.display());
+ Some(fs::read_to_string(path).unwrap())
+ }
+ } else {
+ None
+ }
+ }
+
+ fn write_response(&self, key: u64, value: &str) {
+ fs::create_dir_all(&*CACHE_DIR).unwrap();
+
+ let path = Cache::path(key);
+ log::info!("Writing LLM response to cache: {}", path.display());
+ fs::write(path, value).unwrap();
+ }
+}
+
#[derive(Clone, Debug, Default, Serialize, Deserialize)]
pub struct PredictionDetails {
pub diff: String,
pub excerpts: Vec<ActualExcerpt>,
pub excerpts_text: String, // TODO: contains the worktree root path. Drop this field and compute it on the fly
- pub planning_search_time: Duration,
- pub filtering_search_time: Duration,
- pub running_search_time: Duration,
+ pub planning_search_time: Option<Duration>,
+ pub running_search_time: Option<Duration>,
pub prediction_time: Duration,
pub total_time: Duration,
}
@@ -247,8 +396,7 @@ impl PredictionDetails {
}
pub fn to_markdown(&self) -> String {
- let inference_time =
- self.planning_search_time + self.filtering_search_time + self.prediction_time;
+ let inference_time = self.planning_search_time.unwrap_or_default() + self.prediction_time;
format!(
"## Excerpts\n\n\
@@ -258,16 +406,14 @@ impl PredictionDetails {
## Time\n\n\
Planning searches: {}ms\n\
Running searches: {}ms\n\
- Filtering context results: {}ms\n\
Making Prediction: {}ms\n\n\
-------------------\n\n\
Total: {}ms\n\
Inference: {}ms ({:.2}%)\n",
self.excerpts_text,
self.diff,
- self.planning_search_time.as_millis(),
- self.running_search_time.as_millis(),
- self.filtering_search_time.as_millis(),
+ self.planning_search_time.unwrap_or_default().as_millis(),
+ self.running_search_time.unwrap_or_default().as_millis(),
self.prediction_time.as_millis(),
self.total_time.as_millis(),
inference_time.as_millis(),
@@ -30,16 +30,17 @@ pub fn init_test() {
}
}
-pub fn init_test_with(filter: &str) {
- if try_init(Some(filter.to_owned())).is_ok() {
- init_output_stdout();
- }
-}
-
fn get_env_config() -> Option<String> {
std::env::var("ZED_LOG")
.or_else(|_| std::env::var("RUST_LOG"))
.ok()
+ .or_else(|| {
+ if std::env::var("CI").is_ok() {
+ Some("info".to_owned())
+ } else {
+ None
+ }
+ })
}
pub fn process_env(filter: Option<String>) {
@@ -587,7 +587,7 @@ These routing controls let you fine-tune cost, capability, and reliability tra
### Vercel v0 {#vercel-v0}
-[Vercel v0](https://vercel.com/docs/v0/api) is an expert model for generating full-stack apps, with framework-aware completions optimized for modern stacks like Next.js and Vercel.
+[Vercel v0](https://v0.app/docs/api/model) is an expert model for generating full-stack apps, with framework-aware completions optimized for modern stacks like Next.js and Vercel.
It supports text and image inputs and provides fast streaming responses.
The v0 models are [OpenAI-compatible models](/#openai-api-compatible), but Vercel is listed as first-class provider in the panel's settings view.
@@ -213,7 +213,7 @@ Note: This setting has no effect in Vim mode, as rewrap is already allowed every
## Auto Install extensions
- Description: Define extensions to be autoinstalled or never be installed.
-- Setting: `auto_install_extension`
+- Setting: `auto_install_extensions`
- Default: `{ "html": true }`
**Options**
@@ -2519,11 +2519,12 @@ Unspecified values have a `false` value, hints won't be toggled if all the modif
"path": "~",
"hour_format": "hour12"
}
+
```
### Path
-- Description: The path of the directory where journal entries are stored.
+- Description: The path of the directory where journal entries are stored. If an invalid path is specified, the journal will fall back to using `~` (the home directory).
- Setting: `path`
- Default: `~`
@@ -3184,13 +3185,53 @@ Non-negative `integer` values
```json [settings]
"search": {
+ "button": true,
"whole_word": false,
"case_sensitive": false,
"include_ignored": false,
- "regex": false
+ "regex": false,
+ "center_on_match": false
},
```
+### Button
+
+- Description: Whether to show the project search button in the status bar.
+- Setting: `button`
+- Default: `true`
+
+### Whole Word
+
+- Description: Whether to only match on whole words.
+- Setting: `whole_word`
+- Default: `false`
+
+### Case Sensitive
+
+- Description: Whether to match case sensitively. This setting affects both
+ searches and editor actions like "Select Next Occurrence", "Select Previous
+ Occurrence", and "Select All Occurrences".
+- Setting: `case_sensitive`
+- Default: `false`
+
+### Include Ignored
+
+- Description: Whether to include gitignored files in search results.
+- Setting: `include_ignored`
+- Default: `false`
+
+### Regex
+
+- Description: Whether to interpret the search query as a regular expression.
+- Setting: `regex`
+- Default: `false`
+
+### Center On Match
+
+- Description: Whether to center the cursor on each search match when navigating.
+- Setting: `center_on_match`
+- Default: `false`
+
## Search Wrap
- Description: If `search_wrap` is disabled, search result do not wrap around the end of the file
@@ -10,7 +10,7 @@ Release Notes:
- N/A _or_ Added/Fixed/Improved ...
```
-On Wednesdays, we run a [`get-preview-channel-changes`](https://github.com/zed-industries/zed/blob/main/script/get-preview-channel-changes) script that scrapes `Release Notes` lines from pull requests landing in preview, as documented in our [Release](https://zed.dev/docs/development/releases) docs.
+On Wednesdays, we run a [`get-preview-channel-changes`](https://github.com/zed-industries/zed/blob/main/script/get-preview-channel-changes) script that scrapes `Release Notes` lines from pull requests landing in preview, as documented in our [Release](https://zed.dev/docs/development/release-notes) docs.
The script outputs everything below the `Release Notes` line, including additional data such as the pull request author (if not a Zed team member) and a link to the pull request.
If you use `N/A`, the script skips your pull request entirely.
@@ -53,7 +53,7 @@ If instead you wanted to restrict yourself only to [Zed Language-Specific Docume
### Implicit Wildcards
-When using the "Include" / "Exclude" filters on a Project Search each glob is wrapped in implicit wildcards. For example to exclude any files with license in the path or filename from your search just type type `license` in the exclude box. Behind the scenes Zed transforms `license` to `**license**`. This means that files named `license.*`, `*.license` or inside a `license` subdirectory will all be filtered out. This enables users to easily filter for `*.ts` without having to remember to type `**/*.ts` every time.
+When using the "Include" / "Exclude" filters on a Project Search each glob is wrapped in implicit wildcards. For example to exclude any files with license in the path or filename from your search just type `license` in the exclude box. Behind the scenes Zed transforms `license` to `**license**`. This means that files named `license.*`, `*.license` or inside a `license` subdirectory will all be filtered out. This enables users to easily filter for `*.ts` without having to remember to type `**/*.ts` every time.
Alternatively, if in your Zed settings you wanted a [`file_types`](./configuring-zed.md#file-types) override which only applied to a certain directory you must explicitly include the wildcard globs. For example, if you had a directory of template files with the `html` extension that you wanted to recognize as Jinja2 template you could use the following:
@@ -78,7 +78,7 @@ To get completions for `deno.json` or `package.json` you can add the following t
"fileMatch": [
"package.json"
],
- "url": "http://json.schemastore.org/package"
+ "url": "https://www.schemastore.org/package"
}
]
}
@@ -33,4 +33,4 @@ Once you have the cli, simply from a terminal, navigate to your project and run
zed .
```
-Voila! You should have Zed running with OCaml support, no additional setup required.
+Voilà! You should have Zed running with OCaml support, no additional setup required.
@@ -19,7 +19,7 @@ You can configure various [yaml-language-server settings](https://github.com/red
"singleQuote": true
},
"schemas": {
- "http://json.schemastore.org/composer": ["/*"],
+ "https://getcomposer.org/schema.json": ["/*"],
"../relative/path/schema.json": ["/config*.yaml"]
}
}
@@ -70,7 +70,7 @@ By default yaml-language-server will attempt to determine the correct schema for
You can override any auto-detected schema via the `schemas` settings key (demonstrated above) or by providing an [inlined schema](https://github.com/redhat-developer/yaml-language-server#using-inlined-schema) reference via a modeline comment at the top of your yaml file:
```yaml
-# yaml-language-server: $schema=https://json.schemastore.org/github-action.json
+# yaml-language-server: $schema=https://www.schemastore.org/github-action.json
name: Issue Assignment
on:
issues:
@@ -72,9 +72,10 @@ If you'd prefer, you can install Zed by downloading our pre-built .tar.gz. This
Download the `.tar.gz` file:
-- [zed-linux-x86_64.tar.gz](https://zed.dev/api/releases/stable/latest/zed-linux-x86_64.tar.gz) ([preview](https://zed.dev/api/releases/preview/latest/zed-linux-x86_64.tar.gz))
-- [zed-linux-aarch64.tar.gz](https://zed.dev/api/releases/stable/latest/zed-linux-aarch64.tar.gz)
- ([preview](https://zed.dev/api/releases/preview/latest/zed-linux-aarch64.tar.gz))
+- [zed-linux-x86_64.tar.gz](https://cloud.zed.dev/releases/stable/latest/download?asset=zed&arch=x86_64&os=linux&source=docs)
+ ([preview](https://cloud.zed.dev/releases/preview/latest/download?asset=zed&arch=x86_64&os=linux&source=docs))
+- [zed-linux-aarch64.tar.gz](https://cloud.zed.dev/releases/stable/latest/download?asset=zed&arch=aarch64&os=linux&source=docs)
+ ([preview](https://cloud.zed.dev/releases/preview/latest/download?asset=zed&arch=aarch64&os=linux&source=docs))
Then ensure that the `zed` binary in the tarball is on your path. The easiest way is to unpack the tarball and create a symlink:
@@ -110,4 +110,4 @@ If you encounter issues during uninstallation:
- **Linux**: If the uninstall script fails, check the error message and consider manual removal of the directories listed above.
- **All platforms**: If you want to start fresh while keeping Zed installed, you can delete the configuration directories instead of uninstalling the application entirely.
-For additional help, see our [Linux-specific documentation](./linux.md) or visit the [Zed community](https://zed.dev/community).
+For additional help, see our [Linux-specific documentation](./linux.md) or visit the [Zed community](https://zed.dev/community-links).
@@ -218,6 +218,10 @@ TBD: Centered layout related settings
"active_line_width": 1, // Width of active guide in pixels [1-10]
"coloring": "fixed", // disabled, fixed, indent_aware
"background_coloring": "disabled" // disabled, indent_aware
+ },
+
+ "sticky_scroll": {
+ "enabled": false // Whether to stick scopes to the top of the editor. Disabled by default.
}
```
@@ -22,7 +22,7 @@ Build the application bundle for macOS.
Options:
-d Compile in debug mode
-o Open dir with the resulting DMG or launch the app itself in local mode.
- -i Install the resulting DMG into /Applications in local mode. Noop without -l.
+ -i Install the resulting DMG into /Applications.
-h Display this help and exit.
"
}
@@ -209,16 +209,6 @@ function sign_app_binaries() {
codesign --force --deep --entitlements "${app_path}/Contents/Resources/zed.entitlements" --sign ${MACOS_SIGNING_KEY:- -} "${app_path}" -v
fi
- if [[ "$target_dir" = "debug" ]]; then
- if [ "$open_result" = true ]; then
- open "$app_path"
- else
- echo "Created application bundle:"
- echo "$app_path"
- fi
- exit 0
- fi
-
bundle_name=$(basename "$app_path")
if [ "$local_install" = true ]; then
@@ -229,6 +219,16 @@ function sign_app_binaries() {
echo "Opening /Applications/$bundle_name"
open "/Applications/$bundle_name"
fi
+ elif [ "$open_result" = true ]; then
+ open "$app_path"
+ fi
+
+ if [[ "$target_dir" = "debug" ]]; then
+ echo "Debug build detected - skipping DMG creation and signing"
+ if [ "$local_install" = false ]; then
+ echo "Created application bundle:"
+ echo "$app_path"
+ fi
else
dmg_target_directory="target/${target_triple}/${target_dir}"
dmg_source_directory="${dmg_target_directory}/dmg"
@@ -0,0 +1,60 @@
+#!/bin/bash
+
+# Check if ./target/wasi-sdk exists
+if [ ! -d "./target/wasi-sdk" ]; then
+ echo "WASI SDK not found, downloading v25..."
+
+ # Determine OS and architecture
+ OS=$(uname -s | tr '[:upper:]' '[:lower:]')
+ ARCH=$(uname -m)
+
+ # Map architecture names to WASI SDK format
+ case $ARCH in
+ x86_64)
+ ARCH="x86_64"
+ ;;
+ arm64|aarch64)
+ ARCH="arm64"
+ ;;
+ *)
+ echo "Unsupported architecture: $ARCH"
+ exit 1
+ ;;
+ esac
+
+ # Map OS names to WASI SDK format
+ case $OS in
+ darwin)
+ OS="macos"
+ ;;
+ linux)
+ OS="linux"
+ ;;
+ mingw*|msys*|cygwin*)
+ OS="mingw"
+ ;;
+ *)
+ echo "Unsupported OS: $OS"
+ exit 1
+ ;;
+ esac
+
+ # Construct download URL
+ WASI_SDK_VERSION="25"
+ WASI_SDK_URL="https://github.com/WebAssembly/wasi-sdk/releases/download/wasi-sdk-${WASI_SDK_VERSION}/wasi-sdk-${WASI_SDK_VERSION}.0-${ARCH}-${OS}.tar.gz"
+
+ echo "Downloading from: $WASI_SDK_URL"
+
+ # Create target directory if it doesn't exist
+ mkdir -p ./target
+
+ # Download and extract
+ curl -L "$WASI_SDK_URL" | tar -xz -C ./target
+
+ # Rename the extracted directory to wasi-sdk
+ mv "./target/wasi-sdk-${WASI_SDK_VERSION}.0-${ARCH}-${OS}" "./target/wasi-sdk"
+
+ echo "WASI SDK v25 installed successfully"
+else
+ echo "WASI SDK already exists at ./target/wasi-sdk"
+fi
@@ -18,4 +18,4 @@ case $channel in
;;
esac
-curl -s "https://zed.dev/api/releases/latest?asset=zed&os=macos&arch=aarch64$query" | jq -r .version
+curl -s "https://cloud.zed.dev/releases/$channel/latest/asset?asset=zed&os=macos&arch=aarch64" | jq -r .version
@@ -82,7 +82,7 @@ linux() {
cp "$ZED_BUNDLE_PATH" "$temp/zed-linux-$arch.tar.gz"
else
echo "Downloading Zed"
- curl "https://zed.dev/api/releases/$channel/latest/zed-linux-$arch.tar.gz" > "$temp/zed-linux-$arch.tar.gz"
+ curl "https://cloud.zed.dev/releases/$channel/latest/download?asset=zed&arch=$arch&os=linux&source=install.sh" > "$temp/zed-linux-$arch.tar.gz"
fi
suffix=""
@@ -135,7 +135,7 @@ linux() {
macos() {
echo "Downloading Zed"
- curl "https://zed.dev/api/releases/$channel/latest/Zed-$arch.dmg" > "$temp/Zed-$arch.dmg"
+ curl "https://cloud.zed.dev/releases/$channel/latest/download?asset=zed&os=macos&arch=$arch&source=install.sh" > "$temp/Zed-$arch.dmg"
hdiutil attach -quiet "$temp/Zed-$arch.dmg" -mountpoint "$temp/mount"
app="$(cd "$temp/mount/"; echo *.app)"
echo "Installing $app"
@@ -2,4 +2,8 @@
set -euxo pipefail
-cargo nextest run --workspace --no-fail-fast --features unit-eval --no-capture -E 'test(::eval_)'
+if [ -n "${UNIT_EVAL_COMMIT:-}" ]; then
+ git checkout "$UNIT_EVAL_COMMIT"
+fi
+
+GPUI_TEST_TIMEOUT=1500 cargo nextest run --workspace --no-fail-fast --features unit-eval --no-capture -E 'test(::eval_)'
@@ -33,6 +33,10 @@ pub fn run_workflows(_: GenerateWorkflowArgs) -> Result<()> {
("cherry_pick.yml", cherry_pick::cherry_pick()),
("compare_perf.yml", compare_perf::compare_perf()),
("run_unit_evals.yml", run_agent_evals::run_unit_evals()),
+ (
+ "run_cron_unit_evals.yml",
+ run_agent_evals::run_cron_unit_evals(),
+ ),
("run_agent_evals.yml", run_agent_evals::run_agent_evals()),
("after_release.yml", after_release::after_release()),
];
@@ -1,63 +1,89 @@
-use gh_workflow::{
- Event, Expression, Job, PullRequest, PullRequestType, Run, Schedule, Step, Use, Workflow,
- WorkflowDispatch,
-};
+use gh_workflow::{Event, Expression, Job, Run, Schedule, Step, Use, Workflow, WorkflowDispatch};
use crate::tasks::workflows::{
runners::{self, Platform},
steps::{self, FluentBuilder as _, NamedJob, named, setup_cargo_config},
- vars,
+ vars::{self, Input},
};
pub(crate) fn run_agent_evals() -> Workflow {
let agent_evals = agent_evals();
+ let model_name = Input::string("model_name", None);
named::workflow()
- .on(Event::default()
- .schedule([Schedule::default().cron("0 0 * * *")])
- .pull_request(PullRequest::default().add_branch("**").types([
- PullRequestType::Synchronize,
- PullRequestType::Reopened,
- PullRequestType::Labeled,
- ]))
- .workflow_dispatch(WorkflowDispatch::default()))
+ .on(Event::default().workflow_dispatch(
+ WorkflowDispatch::default().add_input(model_name.name, model_name.input()),
+ ))
.concurrency(vars::one_workflow_per_non_main_branch())
.add_env(("CARGO_TERM_COLOR", "always"))
.add_env(("CARGO_INCREMENTAL", 0))
.add_env(("RUST_BACKTRACE", 1))
.add_env(("ANTHROPIC_API_KEY", vars::ANTHROPIC_API_KEY))
+ .add_env(("OPENAI_API_KEY", vars::OPENAI_API_KEY))
+ .add_env(("GOOGLE_AI_API_KEY", vars::GOOGLE_AI_API_KEY))
+ .add_env(("GOOGLE_CLOUD_PROJECT", vars::GOOGLE_CLOUD_PROJECT))
.add_env(("ZED_CLIENT_CHECKSUM_SEED", vars::ZED_CLIENT_CHECKSUM_SEED))
.add_env(("ZED_EVAL_TELEMETRY", 1))
+ .add_env(("MODEL_NAME", model_name.to_string()))
.add_job(agent_evals.name, agent_evals.job)
}
+pub(crate) fn run_unit_evals() -> Workflow {
+ let model_name = Input::string("model_name", None);
+ let commit_sha = Input::string("commit_sha", None);
+
+ let unit_evals = named::job(unit_evals(Some(&commit_sha)));
+
+ named::workflow()
+ .name("run_unit_evals")
+ .on(Event::default().workflow_dispatch(
+ WorkflowDispatch::default()
+ .add_input(model_name.name, model_name.input())
+ .add_input(commit_sha.name, commit_sha.input()),
+ ))
+ .concurrency(vars::one_workflow_per_non_main_branch())
+ .add_env(("CARGO_TERM_COLOR", "always"))
+ .add_env(("CARGO_INCREMENTAL", 0))
+ .add_env(("RUST_BACKTRACE", 1))
+ .add_env(("ZED_CLIENT_CHECKSUM_SEED", vars::ZED_CLIENT_CHECKSUM_SEED))
+ .add_env(("ZED_EVAL_TELEMETRY", 1))
+ .add_env(("MODEL_NAME", model_name.to_string()))
+ .add_job(unit_evals.name, unit_evals.job)
+}
+
+fn add_api_keys(step: Step<Run>) -> Step<Run> {
+ step.add_env(("ANTHROPIC_API_KEY", vars::ANTHROPIC_API_KEY))
+ .add_env(("OPENAI_API_KEY", vars::OPENAI_API_KEY))
+ .add_env(("GOOGLE_AI_API_KEY", vars::GOOGLE_AI_API_KEY))
+ .add_env(("GOOGLE_CLOUD_PROJECT", vars::GOOGLE_CLOUD_PROJECT))
+}
+
fn agent_evals() -> NamedJob {
fn run_eval() -> Step<Run> {
- named::bash("cargo run --package=eval -- --repetitions=8 --concurrency=1")
+ named::bash(
+ "cargo run --package=eval -- --repetitions=8 --concurrency=1 --model \"${MODEL_NAME}\"",
+ )
}
named::job(
Job::default()
- .cond(Expression::new(indoc::indoc!{r#"
- github.repository_owner == 'zed-industries' &&
- (github.event_name != 'pull_request' || contains(github.event.pull_request.labels.*.name, 'run-eval'))
- "#}))
.runs_on(runners::LINUX_DEFAULT)
- .timeout_minutes(60_u32)
+ .timeout_minutes(60_u32 * 10)
.add_step(steps::checkout_repo())
.add_step(steps::cache_rust_dependencies_namespace())
.map(steps::install_linux_dependencies)
.add_step(setup_cargo_config(Platform::Linux))
.add_step(steps::script("cargo build --package=eval"))
- .add_step(run_eval())
- .add_step(steps::cleanup_cargo_config(Platform::Linux))
+ .add_step(add_api_keys(run_eval()))
+ .add_step(steps::cleanup_cargo_config(Platform::Linux)),
)
}
-pub(crate) fn run_unit_evals() -> Workflow {
- let unit_evals = unit_evals();
+pub(crate) fn run_cron_unit_evals() -> Workflow {
+ let unit_evals = cron_unit_evals();
named::workflow()
+ .name("run_cron_unit_evals")
.on(Event::default()
.schedule([
// GitHub might drop jobs at busy times, so we choose a random time in the middle of the night.
@@ -72,7 +98,7 @@ pub(crate) fn run_unit_evals() -> Workflow {
.add_job(unit_evals.name, unit_evals.job)
}
-fn unit_evals() -> NamedJob {
+fn cron_unit_evals() -> NamedJob {
fn send_failure_to_slack() -> Step<Use> {
named::uses(
"slackapi",
@@ -88,20 +114,39 @@ fn unit_evals() -> NamedJob {
"#}))
}
- named::job(
- Job::default()
- .runs_on(runners::LINUX_DEFAULT)
- .add_step(steps::checkout_repo())
- .add_step(steps::setup_cargo_config(Platform::Linux))
- .add_step(steps::cache_rust_dependencies_namespace())
- .map(steps::install_linux_dependencies)
- .add_step(steps::cargo_install_nextest(Platform::Linux))
- .add_step(steps::clear_target_dir_if_large(Platform::Linux))
- .add_step(
- steps::script("./script/run-unit-evals")
- .add_env(("ANTHROPIC_API_KEY", vars::ANTHROPIC_API_KEY)),
- )
- .add_step(send_failure_to_slack())
- .add_step(steps::cleanup_cargo_config(Platform::Linux)),
- )
+ named::job(unit_evals(None).add_step(send_failure_to_slack()))
+}
+
+fn unit_evals(commit: Option<&Input>) -> Job {
+ fn send_failure_to_slack() -> Step<Use> {
+ named::uses(
+ "slackapi",
+ "slack-github-action",
+ "b0fa283ad8fea605de13dc3f449259339835fc52",
+ )
+ .if_condition(Expression::new("${{ failure() }}"))
+ .add_with(("method", "chat.postMessage"))
+ .add_with(("token", vars::SLACK_APP_ZED_UNIT_EVALS_BOT_TOKEN))
+ .add_with(("payload", indoc::indoc!{r#"
+ channel: C04UDRNNJFQ
+ text: "Unit Evals Failed: https://github.com/zed-industries/zed/actions/runs/${{ github.run_id }}"
+ "#}))
+ }
+
+ let script_step = add_api_keys(steps::script("./script/run-unit-evals"));
+
+ Job::default()
+ .runs_on(runners::LINUX_DEFAULT)
+ .add_step(steps::checkout_repo())
+ .add_step(steps::setup_cargo_config(Platform::Linux))
+ .add_step(steps::cache_rust_dependencies_namespace())
+ .map(steps::install_linux_dependencies)
+ .add_step(steps::cargo_install_nextest(Platform::Linux))
+ .add_step(steps::clear_target_dir_if_large(Platform::Linux))
+ .add_step(match commit {
+ Some(commit) => script_step.add_env(("UNIT_EVAL_COMMIT", commit)),
+ None => script_step,
+ })
+ .add_step(send_failure_to_slack())
+ .add_step(steps::cleanup_cargo_config(Platform::Linux))
}
@@ -292,8 +292,8 @@ fn check_workspace_binaries() -> NamedJob {
.runs_on(runners::LINUX_LARGE)
.add_step(steps::checkout_repo())
.add_step(steps::setup_cargo_config(Platform::Linux))
- .map(steps::install_linux_dependencies)
.add_step(steps::cache_rust_dependencies_namespace())
+ .map(steps::install_linux_dependencies)
.add_step(steps::script("cargo build -p collab"))
.add_step(steps::script("cargo build --workspace --bins --examples"))
.add_step(steps::cleanup_cargo_config(Platform::Linux)),
@@ -312,13 +312,13 @@ pub(crate) fn run_platform_tests(platform: Platform) -> NamedJob {
.runs_on(runner)
.add_step(steps::checkout_repo())
.add_step(steps::setup_cargo_config(platform))
+ .when(platform == Platform::Linux, |this| {
+ this.add_step(steps::cache_rust_dependencies_namespace())
+ })
.when(
platform == Platform::Linux,
steps::install_linux_dependencies,
)
- .when(platform == Platform::Linux, |this| {
- this.add_step(steps::cache_rust_dependencies_namespace())
- })
.add_step(steps::setup_node())
.add_step(steps::clippy(platform))
.add_step(steps::cargo_install_nextest(platform))
@@ -113,8 +113,14 @@ fn install_mold() -> Step<Run> {
named::bash("./script/install-mold")
}
+fn download_wasi_sdk() -> Step<Run> {
+ named::bash("./script/download-wasi-sdk")
+}
+
pub(crate) fn install_linux_dependencies(job: Job) -> Job {
- job.add_step(setup_linux()).add_step(install_mold())
+ job.add_step(setup_linux())
+ .add_step(install_mold())
+ .add_step(download_wasi_sdk())
}
pub fn script(name: &str) -> Step<Run> {
@@ -17,6 +17,9 @@ macro_rules! var {
}
secret!(ANTHROPIC_API_KEY);
+secret!(OPENAI_API_KEY);
+secret!(GOOGLE_AI_API_KEY);
+secret!(GOOGLE_CLOUD_PROJECT);
secret!(APPLE_NOTARIZATION_ISSUER_ID);
secret!(APPLE_NOTARIZATION_KEY);
secret!(APPLE_NOTARIZATION_KEY_ID);