diff --git a/.github/workflows/autofix_pr.yml b/.github/workflows/autofix_pr.yml
index f055c078cf4f814e342697e311ad5660f68f4624..717c5e2fa5e3c35f3ff33d176f73022e7a0c95d4 100644
--- a/.github/workflows/autofix_pr.yml
+++ b/.github/workflows/autofix_pr.yml
@@ -97,6 +97,8 @@ jobs:
with:
app-id: ${{ secrets.ZED_ZIPPY_APP_ID }}
private-key: ${{ secrets.ZED_ZIPPY_APP_PRIVATE_KEY }}
+ permission-contents: write
+ permission-workflows: write
- name: steps::checkout_repo
uses: actions/checkout@93cb6efe18208431cddfb8368fd83d5badbf9bfd
with:
diff --git a/.github/workflows/cherry_pick.yml b/.github/workflows/cherry_pick.yml
index 4a3bd0e643e027e7feaeac4760797e2a1fb16e11..ed0800dc5bbf1ec59182e9d24753e9b5112c4d13 100644
--- a/.github/workflows/cherry_pick.yml
+++ b/.github/workflows/cherry_pick.yml
@@ -35,6 +35,9 @@ jobs:
with:
app-id: ${{ secrets.ZED_ZIPPY_APP_ID }}
private-key: ${{ secrets.ZED_ZIPPY_APP_PRIVATE_KEY }}
+ permission-contents: write
+ permission-workflows: write
+ permission-pull-requests: write
- name: cherry_pick::run_cherry_pick::cherry_pick
run: ./script/cherry-pick "$BRANCH" "$COMMIT" "$CHANNEL"
env:
diff --git a/.github/workflows/compliance_check.yml b/.github/workflows/compliance_check.yml
new file mode 100644
index 0000000000000000000000000000000000000000..f09c460c233b04e78df01e7828b4def737dec16e
--- /dev/null
+++ b/.github/workflows/compliance_check.yml
@@ -0,0 +1,55 @@
+# Generated from xtask::workflows::compliance_check
+# Rebuild with `cargo xtask workflows`.
+name: compliance_check
+env:
+ CARGO_TERM_COLOR: always
+on:
+ schedule:
+ - cron: '30 17 * * 2'
+jobs:
+ scheduled_compliance_check:
+ if: (github.repository_owner == 'zed-industries' || github.repository_owner == 'zed-extensions')
+ runs-on: namespace-profile-2x4-ubuntu-2404
+ steps:
+ - name: steps::checkout_repo
+ uses: actions/checkout@93cb6efe18208431cddfb8368fd83d5badbf9bfd
+ with:
+ clean: false
+ fetch-depth: 0
+ - name: steps::cache_rust_dependencies_namespace
+ uses: namespacelabs/nscloud-cache-action@a90bb5d4b27522ce881c6e98eebd7d7e6d1653f9
+ with:
+ cache: rust
+ path: ~/.rustup
+ - id: determine-version
+ name: compliance_check::scheduled_compliance_check
+ run: |
+ VERSION=$(sed -n 's/^version = "\(.*\)"/\1/p' crates/zed/Cargo.toml | tr -d '[:space:]')
+ if [ -z "$VERSION" ]; then
+ echo "Could not determine version from crates/zed/Cargo.toml"
+ exit 1
+ fi
+ TAG="v${VERSION}-pre"
+ echo "Checking compliance for $TAG"
+ echo "tag=$TAG" >> "$GITHUB_OUTPUT"
+ - id: run-compliance-check
+ name: compliance_check::scheduled_compliance_check::run_compliance_check
+ run: cargo xtask compliance "$LATEST_TAG" --branch main --report-path target/compliance-report
+ env:
+ LATEST_TAG: ${{ steps.determine-version.outputs.tag }}
+ GITHUB_APP_ID: ${{ secrets.ZED_ZIPPY_APP_ID }}
+ GITHUB_APP_KEY: ${{ secrets.ZED_ZIPPY_APP_PRIVATE_KEY }}
+ - name: compliance_check::scheduled_compliance_check::send_failure_slack_notification
+ if: failure()
+ run: |
+ MESSAGE="⚠️ Scheduled compliance check failed for upcoming preview release $LATEST_TAG: There are PRs with missing reviews."
+
+ curl -X POST -H 'Content-type: application/json' \
+ --data "$(jq -n --arg text "$MESSAGE" '{"text": $text}')" \
+ "$SLACK_WEBHOOK"
+ env:
+ SLACK_WEBHOOK: ${{ secrets.SLACK_WEBHOOK_WORKFLOW_FAILURES }}
+ LATEST_TAG: ${{ steps.determine-version.outputs.tag }}
+defaults:
+ run:
+ shell: bash -euxo pipefail {0}
diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml
index 35efafcfcd97c0139f8225ce7b15a05946c385ad..1401144ab3abda17dd4f526edd42166d37a47a49 100644
--- a/.github/workflows/release.yml
+++ b/.github/workflows/release.yml
@@ -293,6 +293,51 @@ jobs:
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
timeout-minutes: 60
+ compliance_check:
+ if: (github.repository_owner == 'zed-industries' || github.repository_owner == 'zed-extensions')
+ runs-on: namespace-profile-16x32-ubuntu-2204
+ env:
+ COMPLIANCE_FILE_PATH: compliance.md
+ steps:
+ - name: steps::checkout_repo
+ uses: actions/checkout@93cb6efe18208431cddfb8368fd83d5badbf9bfd
+ with:
+ clean: false
+ fetch-depth: 0
+ ref: ${{ github.ref }}
+ - name: steps::cache_rust_dependencies_namespace
+ uses: namespacelabs/nscloud-cache-action@a90bb5d4b27522ce881c6e98eebd7d7e6d1653f9
+ with:
+ cache: rust
+ path: ~/.rustup
+ - id: run-compliance-check
+ name: release::compliance_check::run_compliance_check
+ run: cargo xtask compliance "$GITHUB_REF_NAME" --report-path "$COMPLIANCE_FILE_PATH"
+ env:
+ GITHUB_APP_ID: ${{ secrets.ZED_ZIPPY_APP_ID }}
+ GITHUB_APP_KEY: ${{ secrets.ZED_ZIPPY_APP_PRIVATE_KEY }}
+ - name: release::compliance_check::send_compliance_slack_notification
+ if: always()
+ run: |
+ if [ "$COMPLIANCE_OUTCOME" == "success" ]; then
+ STATUS="✅ Compliance check passed for $GITHUB_REF_NAME"
+ else
+ STATUS="❌ Compliance check failed for $GITHUB_REF_NAME"
+ fi
+
+ REPORT_CONTENT=""
+ if [ -f "$COMPLIANCE_FILE_PATH" ]; then
+ REPORT_CONTENT=$(cat "$COMPLIANCE_FILE_PATH")
+ fi
+
+ MESSAGE=$(printf "%s\n\n%s" "$STATUS" "$REPORT_CONTENT")
+
+ curl -X POST -H 'Content-type: application/json' \
+ --data "$(jq -n --arg text "$MESSAGE" '{"text": $text}')" \
+ "$SLACK_WEBHOOK"
+ env:
+ SLACK_WEBHOOK: ${{ secrets.SLACK_WEBHOOK_WORKFLOW_FAILURES }}
+ COMPLIANCE_OUTCOME: ${{ steps.run-compliance-check.outcome }}
bundle_linux_aarch64:
needs:
- run_tests_linux
@@ -613,6 +658,45 @@ jobs:
echo "All expected assets are present in the release."
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
+ - name: steps::checkout_repo
+ uses: actions/checkout@93cb6efe18208431cddfb8368fd83d5badbf9bfd
+ with:
+ clean: false
+ fetch-depth: 0
+ ref: ${{ github.ref }}
+ - name: steps::cache_rust_dependencies_namespace
+ uses: namespacelabs/nscloud-cache-action@a90bb5d4b27522ce881c6e98eebd7d7e6d1653f9
+ with:
+ cache: rust
+ path: ~/.rustup
+ - id: run-post-upload-compliance-check
+ name: release::validate_release_assets::run_post_upload_compliance_check
+ run: cargo xtask compliance "$GITHUB_REF_NAME" --report-path target/compliance-report
+ env:
+ GITHUB_APP_ID: ${{ secrets.ZED_ZIPPY_APP_ID }}
+ GITHUB_APP_KEY: ${{ secrets.ZED_ZIPPY_APP_PRIVATE_KEY }}
+ - name: release::validate_release_assets::send_post_upload_compliance_notification
+ if: always()
+ run: |
+ if [ -z "$COMPLIANCE_OUTCOME" ] || [ "$COMPLIANCE_OUTCOME" == "skipped" ]; then
+ echo "Compliance check was skipped, not sending notification"
+ exit 0
+ fi
+
+ TAG="$GITHUB_REF_NAME"
+
+ if [ "$COMPLIANCE_OUTCOME" == "success" ]; then
+ MESSAGE="✅ Post-upload compliance re-check passed for $TAG"
+ else
+ MESSAGE="❌ Post-upload compliance re-check failed for $TAG"
+ fi
+
+ curl -X POST -H 'Content-type: application/json' \
+ --data "$(jq -n --arg text "$MESSAGE" '{"text": $text}')" \
+ "$SLACK_WEBHOOK"
+ env:
+ SLACK_WEBHOOK: ${{ secrets.SLACK_WEBHOOK_WORKFLOW_FAILURES }}
+ COMPLIANCE_OUTCOME: ${{ steps.run-post-upload-compliance-check.outcome }}
auto_release_preview:
needs:
- validate_release_assets
diff --git a/.zed/tasks.json b/.zed/tasks.json
index b6a9d9f4cd794d205d028f12bd8300e70f988f55..be2ccefedca46406713d9abf116c5efa9390fdb8 100644
--- a/.zed/tasks.json
+++ b/.zed/tasks.json
@@ -4,13 +4,13 @@
"command": "./script/clippy",
"args": [],
"allow_concurrent_runs": true,
- "use_new_terminal": false
+ "use_new_terminal": false,
},
{
"label": "cargo run --profile release-fast",
"command": "cargo",
"args": ["run", "--profile", "release-fast"],
"allow_concurrent_runs": true,
- "use_new_terminal": false
- }
+ "use_new_terminal": false,
+ },
]
diff --git a/Cargo.lock b/Cargo.lock
index d88eff40b621a72a3216f1da56e5917706655d75..97412711a55667a4976a35313eb6c0388acc74ef 100644
--- a/Cargo.lock
+++ b/Cargo.lock
@@ -15,7 +15,7 @@ dependencies = [
"collections",
"env_logger 0.11.8",
"file_icons",
- "futures 0.3.31",
+ "futures 0.3.32",
"gpui",
"image",
"indoc",
@@ -75,7 +75,7 @@ dependencies = [
"collections",
"ctor",
"fs",
- "futures 0.3.31",
+ "futures 0.3.32",
"gpui",
"language",
"log",
@@ -100,7 +100,7 @@ dependencies = [
"editor",
"extension_host",
"fs",
- "futures 0.3.31",
+ "futures 0.3.32",
"gpui",
"language",
"project",
@@ -163,7 +163,7 @@ dependencies = [
"eval_utils",
"feature_flags",
"fs",
- "futures 0.3.31",
+ "futures 0.3.32",
"git",
"gpui",
"gpui_tokio",
@@ -227,7 +227,7 @@ dependencies = [
"async-broadcast",
"async-trait",
"derive_more",
- "futures 0.3.31",
+ "futures 0.3.32",
"log",
"serde",
"serde_json",
@@ -260,11 +260,10 @@ dependencies = [
"chrono",
"client",
"collections",
- "credentials_provider",
"env_logger 0.11.8",
"feature_flags",
"fs",
- "futures 0.3.31",
+ "futures 0.3.32",
"google_ai",
"gpui",
"gpui_tokio",
@@ -289,6 +288,7 @@ dependencies = [
"util",
"uuid",
"watch",
+ "zed_credentials_provider",
]
[[package]]
@@ -344,7 +344,7 @@ dependencies = [
"feature_flags",
"file_icons",
"fs",
- "futures 0.3.31",
+ "futures 0.3.32",
"fuzzy",
"git",
"gpui",
@@ -629,7 +629,7 @@ version = "0.1.0"
dependencies = [
"anyhow",
"chrono",
- "futures 0.3.31",
+ "futures 0.3.32",
"http_client",
"schemars",
"serde",
@@ -677,6 +677,15 @@ dependencies = [
"derive_arbitrary",
]
+[[package]]
+name = "arc-swap"
+version = "1.9.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "a07d1f37ff60921c83bdfc7407723bdefe89b44b98a9b772f225c8f9d67141a6"
+dependencies = [
+ "rustversion",
+]
+
[[package]]
name = "arg_enum_proc_macro"
version = "0.3.4"
@@ -750,7 +759,7 @@ name = "askpass"
version = "0.1.0"
dependencies = [
"anyhow",
- "futures 0.3.31",
+ "futures 0.3.32",
"gpui",
"log",
"net",
@@ -945,7 +954,7 @@ name = "async-pipe"
version = "0.1.3"
source = "git+https://github.com/zed-industries/async-pipe-rs?rev=82d00a04211cf4e1236029aa03e6b6ce2a74c553#82d00a04211cf4e1236029aa03e6b6ce2a74c553"
dependencies = [
- "futures 0.3.31",
+ "futures 0.3.32",
"log",
]
@@ -1183,7 +1192,7 @@ dependencies = [
"clock",
"ctor",
"db",
- "futures 0.3.31",
+ "futures 0.3.32",
"futures-lite 1.13.0",
"gpui",
"http_client",
@@ -1862,7 +1871,7 @@ dependencies = [
"anyhow",
"aws-sdk-bedrockruntime",
"aws-smithy-types",
- "futures 0.3.31",
+ "futures 0.3.32",
"schemars",
"serde",
"serde_json",
@@ -2151,7 +2160,7 @@ version = "0.1.0"
dependencies = [
"clock",
"ctor",
- "futures 0.3.31",
+ "futures 0.3.32",
"git2",
"gpui",
"language",
@@ -2348,7 +2357,7 @@ dependencies = [
"collections",
"feature_flags",
"fs",
- "futures 0.3.31",
+ "futures 0.3.32",
"gpui",
"gpui_tokio",
"language",
@@ -2530,6 +2539,16 @@ dependencies = [
"serde",
]
+[[package]]
+name = "cargo-platform"
+version = "0.3.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "87a0c0e6148f11f01f32650a2ea02d532b2ad4e81d8bd41e6e565b5adc5e6082"
+dependencies = [
+ "serde",
+ "serde_core",
+]
+
[[package]]
name = "cargo_metadata"
version = "0.19.2"
@@ -2537,7 +2556,21 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "dd5eb614ed4c27c5d706420e4320fbe3216ab31fa1c33cd8246ac36dae4479ba"
dependencies = [
"camino",
- "cargo-platform",
+ "cargo-platform 0.1.9",
+ "semver",
+ "serde",
+ "serde_json",
+ "thiserror 2.0.17",
+]
+
+[[package]]
+name = "cargo_metadata"
+version = "0.23.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "ef987d17b0a113becdd19d3d0022d04d7ef41f9efe4f3fb63ac44ba61df3ade9"
+dependencies = [
+ "camino",
+ "cargo-platform 0.3.2",
"semver",
"serde",
"serde_json",
@@ -2669,7 +2702,7 @@ dependencies = [
"client",
"clock",
"collections",
- "futures 0.3.31",
+ "futures 0.3.32",
"gpui",
"http_client",
"language",
@@ -2856,6 +2889,7 @@ dependencies = [
"chrono",
"clock",
"cloud_api_client",
+ "cloud_api_types",
"cloud_llm_client",
"collections",
"credentials_provider",
@@ -2863,12 +2897,13 @@ dependencies = [
"derive_more",
"feature_flags",
"fs",
- "futures 0.3.31",
+ "futures 0.3.32",
"gpui",
"gpui_tokio",
"http_client",
"http_client_tls",
"httparse",
+ "language_model",
"log",
"objc2-foundation",
"parking_lot",
@@ -2900,6 +2935,7 @@ dependencies = [
"util",
"windows 0.61.3",
"worktree",
+ "zed_credentials_provider",
]
[[package]]
@@ -2917,7 +2953,7 @@ version = "0.1.0"
dependencies = [
"anyhow",
"cloud_api_types",
- "futures 0.3.31",
+ "futures 0.3.32",
"gpui",
"gpui_tokio",
"http_client",
@@ -3049,7 +3085,7 @@ dependencies = [
"anyhow",
"edit_prediction",
"edit_prediction_types",
- "futures 0.3.31",
+ "futures 0.3.32",
"gpui",
"http_client",
"icons",
@@ -3059,6 +3095,7 @@ dependencies = [
"serde",
"serde_json",
"text",
+ "zed_credentials_provider",
"zeta_prompt",
]
@@ -3095,7 +3132,7 @@ dependencies = [
"extension",
"file_finder",
"fs",
- "futures 0.3.31",
+ "futures 0.3.32",
"git",
"git_hosting_providers",
"git_ui",
@@ -3172,7 +3209,7 @@ dependencies = [
"collections",
"db",
"editor",
- "futures 0.3.31",
+ "futures 0.3.32",
"fuzzy",
"gpui",
"livekit_client",
@@ -3280,6 +3317,25 @@ dependencies = [
"workspace",
]
+[[package]]
+name = "compliance"
+version = "0.1.0"
+dependencies = [
+ "anyhow",
+ "async-trait",
+ "derive_more",
+ "futures 0.3.32",
+ "indoc",
+ "itertools 0.14.0",
+ "jsonwebtoken",
+ "octocrab",
+ "regex",
+ "semver",
+ "serde",
+ "serde_json",
+ "tokio",
+]
+
[[package]]
name = "component"
version = "0.1.0"
@@ -3433,7 +3489,7 @@ dependencies = [
"async-trait",
"base64 0.22.1",
"collections",
- "futures 0.3.31",
+ "futures 0.3.32",
"gpui",
"http_client",
"log",
@@ -3494,7 +3550,7 @@ dependencies = [
"edit_prediction_types",
"editor",
"fs",
- "futures 0.3.31",
+ "futures 0.3.32",
"gpui",
"icons",
"indoc",
@@ -3528,7 +3584,7 @@ dependencies = [
"collections",
"dirs 4.0.0",
"fs",
- "futures 0.3.31",
+ "futures 0.3.32",
"gpui",
"http_client",
"log",
@@ -3978,7 +4034,7 @@ version = "0.1.0"
dependencies = [
"cfg-if",
"crash-handler",
- "futures 0.3.31",
+ "futures 0.3.32",
"log",
"mach2 0.5.0",
"minidumper",
@@ -4035,12 +4091,8 @@ name = "credentials_provider"
version = "0.1.0"
dependencies = [
"anyhow",
- "futures 0.3.31",
"gpui",
- "paths",
- "release_channel",
"serde",
- "serde_json",
]
[[package]]
@@ -4318,7 +4370,7 @@ dependencies = [
"collections",
"dap-types",
"fs",
- "futures 0.3.31",
+ "futures 0.3.32",
"gpui",
"http_client",
"language",
@@ -4360,7 +4412,7 @@ dependencies = [
"dap",
"dotenvy",
"fs",
- "futures 0.3.31",
+ "futures 0.3.32",
"gpui",
"http_client",
"json_dotpath",
@@ -4531,7 +4583,7 @@ dependencies = [
"anyhow",
"dap",
"editor",
- "futures 0.3.31",
+ "futures 0.3.32",
"gpui",
"project",
"serde_json",
@@ -4558,7 +4610,7 @@ dependencies = [
"editor",
"feature_flags",
"file_icons",
- "futures 0.3.31",
+ "futures 0.3.32",
"fuzzy",
"gpui",
"hex",
@@ -4613,7 +4665,7 @@ name = "deepseek"
version = "0.1.0"
dependencies = [
"anyhow",
- "futures 0.3.31",
+ "futures 0.3.32",
"http_client",
"schemars",
"serde",
@@ -4733,7 +4785,7 @@ dependencies = [
"async-trait",
"env_logger 0.11.8",
"fs",
- "futures 0.3.31",
+ "futures 0.3.32",
"gpui",
"http 1.3.1",
"http_client",
@@ -5115,13 +5167,14 @@ dependencies = [
"collections",
"copilot",
"copilot_ui",
+ "credentials_provider",
"ctor",
"db",
"edit_prediction_context",
"edit_prediction_types",
"feature_flags",
"fs",
- "futures 0.3.31",
+ "futures 0.3.32",
"gpui",
"heapless",
"indoc",
@@ -5157,6 +5210,7 @@ dependencies = [
"workspace",
"worktree",
"zed_actions",
+ "zed_credentials_provider",
"zeta_prompt",
"zlog",
"zstd",
@@ -5173,6 +5227,7 @@ dependencies = [
"client",
"cloud_llm_client",
"collections",
+ "criterion",
"db",
"debug_adapter_extension",
"dirs 4.0.0",
@@ -5180,7 +5235,7 @@ dependencies = [
"extension",
"flate2",
"fs",
- "futures 0.3.31",
+ "futures 0.3.32",
"gaoya",
"gpui",
"gpui_platform",
@@ -5232,7 +5287,7 @@ dependencies = [
"clock",
"collections",
"env_logger 0.11.8",
- "futures 0.3.31",
+ "futures 0.3.32",
"gpui",
"indoc",
"language",
@@ -5281,7 +5336,7 @@ dependencies = [
"editor",
"feature_flags",
"fs",
- "futures 0.3.31",
+ "futures 0.3.32",
"gpui",
"indoc",
"language",
@@ -5326,7 +5381,7 @@ dependencies = [
"feature_flags",
"file_icons",
"fs",
- "futures 0.3.31",
+ "futures 0.3.32",
"fuzzy",
"git",
"gpui",
@@ -5582,6 +5637,13 @@ dependencies = [
"log",
]
+[[package]]
+name = "env_var"
+version = "0.1.0"
+dependencies = [
+ "gpui",
+]
+
[[package]]
name = "envy"
version = "0.4.2"
@@ -5733,7 +5795,7 @@ dependencies = [
"extension",
"feature_flags",
"fs",
- "futures 0.3.31",
+ "futures 0.3.32",
"gpui",
"gpui_platform",
"gpui_tokio",
@@ -5843,7 +5905,7 @@ dependencies = [
"collections",
"dap",
"fs",
- "futures 0.3.31",
+ "futures 0.3.32",
"gpui",
"heck 0.5.0",
"http_client",
@@ -5911,7 +5973,7 @@ dependencies = [
"dap",
"extension",
"fs",
- "futures 0.3.31",
+ "futures 0.3.32",
"gpui",
"gpui_tokio",
"http_client",
@@ -6119,7 +6181,7 @@ dependencies = [
"ctor",
"editor",
"file_icons",
- "futures 0.3.31",
+ "futures 0.3.32",
"fuzzy",
"gpui",
"menu",
@@ -6421,7 +6483,7 @@ dependencies = [
"collections",
"dunce",
"fs",
- "futures 0.3.31",
+ "futures 0.3.32",
"git",
"gpui",
"ignore",
@@ -6519,9 +6581,9 @@ checksum = "3a471a38ef8ed83cd6e40aa59c1ffe17db6855c18e3604d9c4ed8c08ebc28678"
[[package]]
name = "futures"
-version = "0.3.31"
+version = "0.3.32"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "65bc07b1a8bc7c85c5f2e110c476c7389b4554ba72af57d8445ea63a576b0876"
+checksum = "8b147ee9d1f6d097cef9ce628cd2ee62288d963e16fb287bd9286455b241382d"
dependencies = [
"futures-channel",
"futures-core",
@@ -6534,9 +6596,9 @@ dependencies = [
[[package]]
name = "futures-channel"
-version = "0.3.31"
+version = "0.3.32"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "2dff15bf788c671c1934e366d07e30c1814a8ef514e1af724a602e8a2fbe1b10"
+checksum = "07bbe89c50d7a535e539b8c17bc0b49bdb77747034daa8087407d655f3f7cc1d"
dependencies = [
"futures-core",
"futures-sink",
@@ -6557,15 +6619,15 @@ dependencies = [
[[package]]
name = "futures-core"
-version = "0.3.31"
+version = "0.3.32"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "05f29059c0c2090612e8d742178b0580d2dc940c837851ad723096f87af6663e"
+checksum = "7e3450815272ef58cec6d564423f6e755e25379b217b0bc688e295ba24df6b1d"
[[package]]
name = "futures-executor"
-version = "0.3.31"
+version = "0.3.32"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "1e28d1d997f585e54aebc3f97d39e72338912123a67330d723fdbb564d646c9f"
+checksum = "baf29c38818342a3b26b5b923639e7b1f4a61fc5e76102d4b1981c6dc7a7579d"
dependencies = [
"futures-core",
"futures-task",
@@ -6585,9 +6647,9 @@ dependencies = [
[[package]]
name = "futures-io"
-version = "0.3.31"
+version = "0.3.32"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "9e5c1b78ca4aae1ac06c48a526a655760685149f0d465d21f37abfe57ce075c6"
+checksum = "cecba35d7ad927e23624b22ad55235f2239cfa44fd10428eecbeba6d6a717718"
[[package]]
name = "futures-lite"
@@ -6619,9 +6681,9 @@ dependencies = [
[[package]]
name = "futures-macro"
-version = "0.3.31"
+version = "0.3.32"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "162ee34ebcb7c64a8abebc059ce0fee27c2262618d7b60ed8faf72fef13c3650"
+checksum = "e835b70203e41293343137df5c0664546da5745f82ec9b84d40be8336958447b"
dependencies = [
"proc-macro2",
"quote",
@@ -6630,21 +6692,21 @@ dependencies = [
[[package]]
name = "futures-sink"
-version = "0.3.31"
+version = "0.3.32"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "e575fab7d1e0dcb8d0c7bcf9a63ee213816ab51902e6d244a95819acacf1d4f7"
+checksum = "c39754e157331b013978ec91992bde1ac089843443c49cbc7f46150b0fad0893"
[[package]]
name = "futures-task"
-version = "0.3.31"
+version = "0.3.32"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "f90f7dce0722e95104fcb095585910c0977252f286e354b5e3bd38902cd99988"
+checksum = "037711b3d59c33004d3856fbdc83b99d4ff37a24768fa1be9ce3538a1cde4393"
[[package]]
name = "futures-util"
-version = "0.3.31"
+version = "0.3.32"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "9fa08315bb612088cc391249efdc3bc77536f16c91f6cf495e6fbe85b20a4a81"
+checksum = "389ca41296e6190b48053de0321d02a77f32f8a5d2461dd38762c0593805c6d6"
dependencies = [
"futures 0.1.31",
"futures-channel",
@@ -6653,9 +6715,9 @@ dependencies = [
"futures-macro",
"futures-sink",
"futures-task",
+ "libc",
"memchr",
"pin-project-lite",
- "pin-utils",
"slab",
"tokio-io",
]
@@ -7082,7 +7144,7 @@ dependencies = [
"async-trait",
"collections",
"derive_more",
- "futures 0.3.31",
+ "futures 0.3.32",
"git2",
"gpui",
"http_client",
@@ -7131,7 +7193,6 @@ dependencies = [
"collections",
"db",
"editor",
- "feature_flags",
"fs",
"git",
"git_ui",
@@ -7159,7 +7220,7 @@ version = "0.1.0"
dependencies = [
"anyhow",
"async-trait",
- "futures 0.3.31",
+ "futures 0.3.32",
"git",
"gpui",
"http_client",
@@ -7189,9 +7250,8 @@ dependencies = [
"ctor",
"db",
"editor",
- "feature_flags",
"file_icons",
- "futures 0.3.31",
+ "futures 0.3.32",
"fuzzy",
"git",
"gpui",
@@ -7396,7 +7456,7 @@ name = "google_ai"
version = "0.1.0"
dependencies = [
"anyhow",
- "futures 0.3.31",
+ "futures 0.3.32",
"http_client",
"schemars",
"serde",
@@ -7466,7 +7526,7 @@ dependencies = [
"env_logger 0.11.8",
"etagere",
"foreign-types 0.5.0",
- "futures 0.3.31",
+ "futures 0.3.32",
"futures-concurrency",
"getrandom 0.3.4",
"gpui_macros",
@@ -7541,7 +7601,7 @@ dependencies = [
"calloop-wayland-source",
"collections",
"filedescriptor",
- "futures 0.3.31",
+ "futures 0.3.32",
"gpui",
"gpui_wgpu",
"http_client",
@@ -7595,7 +7655,7 @@ dependencies = [
"dispatch2",
"etagere",
"foreign-types 0.5.0",
- "futures 0.3.31",
+ "futures 0.3.32",
"gpui",
"image",
"itertools 0.14.0",
@@ -7664,7 +7724,7 @@ version = "0.1.0"
dependencies = [
"anyhow",
"console_error_panic_hook",
- "futures 0.3.31",
+ "futures 0.3.32",
"gpui",
"gpui_wgpu",
"http_client",
@@ -7715,7 +7775,7 @@ dependencies = [
"anyhow",
"collections",
"etagere",
- "futures 0.3.31",
+ "futures 0.3.32",
"gpui",
"image",
"itertools 0.14.0",
@@ -8199,7 +8259,7 @@ dependencies = [
"async-tar",
"bytes 1.11.1",
"derive_more",
- "futures 0.3.31",
+ "futures 0.3.32",
"http 1.3.1",
"http-body 1.0.1",
"log",
@@ -8316,6 +8376,7 @@ dependencies = [
"http 1.3.1",
"hyper 1.7.0",
"hyper-util",
+ "log",
"rustls 0.23.33",
"rustls-native-certs 0.8.2",
"rustls-pki-types",
@@ -8324,6 +8385,19 @@ dependencies = [
"tower-service",
]
+[[package]]
+name = "hyper-timeout"
+version = "0.5.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "2b90d566bffbce6a75bd8b09a05aa8c2cb1fabb6cb348f8840c9e4c90a0d83b0"
+dependencies = [
+ "hyper 1.7.0",
+ "hyper-util",
+ "pin-project-lite",
+ "tokio",
+ "tower-service",
+]
+
[[package]]
name = "hyper-tls"
version = "0.5.0"
@@ -9082,7 +9156,7 @@ dependencies = [
"async-trait",
"bytes 1.11.1",
"chrono",
- "futures 0.3.31",
+ "futures 0.3.32",
"serde",
"serde_json",
"thiserror 2.0.17",
@@ -9098,7 +9172,7 @@ dependencies = [
"anyhow",
"async-trait",
"async-tungstenite",
- "futures 0.3.31",
+ "futures 0.3.32",
"jupyter-protocol",
"serde",
"serde_json",
@@ -9216,7 +9290,7 @@ dependencies = [
"ec4rs",
"encoding_rs",
"fs",
- "futures 0.3.31",
+ "futures 0.3.32",
"fuzzy",
"globset",
"gpui",
@@ -9296,7 +9370,7 @@ dependencies = [
"collections",
"extension",
"fs",
- "futures 0.3.31",
+ "futures 0.3.32",
"gpui",
"language",
"log",
@@ -9316,13 +9390,13 @@ dependencies = [
"anthropic",
"anyhow",
"base64 0.22.1",
- "client",
"cloud_api_client",
"cloud_api_types",
"cloud_llm_client",
"collections",
"credentials_provider",
- "futures 0.3.31",
+ "env_var",
+ "futures 0.3.32",
"gpui",
"http_client",
"icons",
@@ -9337,7 +9411,6 @@ dependencies = [
"smol",
"thiserror 2.0.17",
"util",
- "zed_env_vars",
]
[[package]]
@@ -9366,7 +9439,7 @@ dependencies = [
"extension",
"extension_host",
"fs",
- "futures 0.3.31",
+ "futures 0.3.32",
"google_ai",
"gpui",
"gpui_tokio",
@@ -9443,7 +9516,7 @@ dependencies = [
"command_palette_hooks",
"edit_prediction",
"editor",
- "futures 0.3.31",
+ "futures 0.3.32",
"gpui",
"itertools 0.14.0",
"language",
@@ -9479,7 +9552,7 @@ dependencies = [
"chrono",
"collections",
"fs",
- "futures 0.3.31",
+ "futures 0.3.32",
"globset",
"gpui",
"grammars",
@@ -9866,7 +9939,7 @@ dependencies = [
"core-video",
"coreaudio-rs 0.12.1",
"cpal",
- "futures 0.3.31",
+ "futures 0.3.32",
"gpui",
"gpui_platform",
"gpui_tokio",
@@ -9910,7 +9983,7 @@ name = "lmstudio"
version = "0.1.0"
dependencies = [
"anyhow",
- "futures 0.3.31",
+ "futures 0.3.32",
"http_client",
"schemars",
"serde",
@@ -9981,7 +10054,7 @@ dependencies = [
"async-pipe",
"collections",
"ctor",
- "futures 0.3.31",
+ "futures 0.3.32",
"gpui",
"gpui_util",
"log",
@@ -10001,7 +10074,7 @@ dependencies = [
[[package]]
name = "lsp-types"
version = "0.95.1"
-source = "git+https://github.com/zed-industries/lsp-types?rev=a4f410987660bf560d1e617cb78117c6b6b9f599#a4f410987660bf560d1e617cb78117c6b6b9f599"
+source = "git+https://github.com/zed-industries/lsp-types?rev=c7396459fefc7886b4adfa3b596832405ae1e880#c7396459fefc7886b4adfa3b596832405ae1e880"
dependencies = [
"bitflags 1.3.2",
"serde",
@@ -10121,7 +10194,7 @@ dependencies = [
"collections",
"env_logger 0.11.8",
"fs",
- "futures 0.3.31",
+ "futures 0.3.32",
"gpui",
"gpui_platform",
"html5ever 0.27.0",
@@ -10152,6 +10225,7 @@ dependencies = [
"language",
"log",
"markdown",
+ "project",
"settings",
"tempfile",
"theme_settings",
@@ -10568,7 +10642,7 @@ name = "mistral"
version = "0.1.0"
dependencies = [
"anyhow",
- "futures 0.3.31",
+ "futures 0.3.32",
"http_client",
"schemars",
"serde",
@@ -10757,7 +10831,7 @@ name = "nc"
version = "0.1.0"
dependencies = [
"anyhow",
- "futures 0.3.31",
+ "futures 0.3.32",
"net",
"smol",
]
@@ -10853,7 +10927,7 @@ dependencies = [
"async-std",
"async-tar",
"async-trait",
- "futures 0.3.31",
+ "futures 0.3.32",
"http_client",
"log",
"paths",
@@ -11177,7 +11251,7 @@ version = "0.9.2"
source = "git+https://github.com/KillTheMule/nvim-rs?rev=764dd270c642f77f10f3e19d05cc178a6cbe69f3#764dd270c642f77f10f3e19d05cc178a6cbe69f3"
dependencies = [
"async-trait",
- "futures 0.3.31",
+ "futures 0.3.32",
"log",
"rmp",
"rmpv",
@@ -11372,12 +11446,54 @@ dependencies = [
"memchr",
]
+[[package]]
+name = "octocrab"
+version = "0.49.7"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "63f6687a23731011d0117f9f4c3cdabaa7b5e42ca671f42b5cc0657c492540e3"
+dependencies = [
+ "arc-swap",
+ "async-trait",
+ "base64 0.22.1",
+ "bytes 1.11.1",
+ "cargo_metadata 0.23.1",
+ "cfg-if",
+ "chrono",
+ "either",
+ "futures 0.3.32",
+ "futures-core",
+ "futures-util",
+ "getrandom 0.2.16",
+ "http 1.3.1",
+ "http-body 1.0.1",
+ "http-body-util",
+ "hyper 1.7.0",
+ "hyper-rustls 0.27.7",
+ "hyper-timeout",
+ "hyper-util",
+ "jsonwebtoken",
+ "once_cell",
+ "percent-encoding",
+ "pin-project",
+ "secrecy",
+ "serde",
+ "serde_json",
+ "serde_path_to_error",
+ "serde_urlencoded",
+ "snafu",
+ "tokio",
+ "tower 0.5.2",
+ "tower-http 0.6.6",
+ "url",
+ "web-time",
+]
+
[[package]]
name = "ollama"
version = "0.1.0"
dependencies = [
"anyhow",
- "futures 0.3.31",
+ "futures 0.3.32",
"http_client",
"schemars",
"serde",
@@ -11484,7 +11600,7 @@ name = "open_ai"
version = "0.1.0"
dependencies = [
"anyhow",
- "futures 0.3.31",
+ "futures 0.3.32",
"http_client",
"log",
"rand 0.9.2",
@@ -11502,7 +11618,7 @@ version = "0.1.0"
dependencies = [
"editor",
"file_icons",
- "futures 0.3.31",
+ "futures 0.3.32",
"fuzzy",
"gpui",
"picker",
@@ -11523,7 +11639,7 @@ name = "open_router"
version = "0.1.0"
dependencies = [
"anyhow",
- "futures 0.3.31",
+ "futures 0.3.32",
"http_client",
"schemars",
"serde",
@@ -11538,7 +11654,7 @@ name = "opencode"
version = "0.1.0"
dependencies = [
"anyhow",
- "futures 0.3.31",
+ "futures 0.3.32",
"google_ai",
"http_client",
"schemars",
@@ -12852,7 +12968,7 @@ checksum = "af3fb618632874fb76937c2361a7f22afd393c982a2165595407edc75b06d3c1"
dependencies = [
"atomic",
"crossbeam-queue",
- "futures 0.3.31",
+ "futures 0.3.32",
"log",
"parking_lot",
"pin-project",
@@ -13085,7 +13201,7 @@ dependencies = [
"extension",
"fancy-regex 0.17.0",
"fs",
- "futures 0.3.31",
+ "futures 0.3.32",
"fuzzy",
"git",
"git2",
@@ -13138,6 +13254,7 @@ dependencies = [
"wax",
"which 6.0.3",
"worktree",
+ "zed_credentials_provider",
"zeroize",
"zlog",
"ztracing",
@@ -13151,7 +13268,7 @@ dependencies = [
"askpass",
"clap",
"client",
- "futures 0.3.31",
+ "futures 0.3.32",
"gpui",
"gpui_platform",
"http_client",
@@ -13212,7 +13329,7 @@ version = "0.1.0"
dependencies = [
"anyhow",
"editor",
- "futures 0.3.31",
+ "futures 0.3.32",
"fuzzy",
"gpui",
"language",
@@ -13254,7 +13371,7 @@ dependencies = [
"chrono",
"collections",
"fs",
- "futures 0.3.31",
+ "futures 0.3.32",
"fuzzy",
"gpui",
"handlebars 4.5.0",
@@ -14009,7 +14126,7 @@ dependencies = [
"extension",
"extension_host",
"fs",
- "futures 0.3.31",
+ "futures 0.3.32",
"fuzzy",
"gpui",
"http_client",
@@ -14193,7 +14310,7 @@ dependencies = [
"base64 0.22.1",
"collections",
"fs",
- "futures 0.3.31",
+ "futures 0.3.32",
"gpui",
"log",
"parking_lot",
@@ -14221,7 +14338,7 @@ dependencies = [
"anyhow",
"askpass",
"auto_update",
- "futures 0.3.31",
+ "futures 0.3.32",
"gpui",
"log",
"markdown",
@@ -14259,7 +14376,7 @@ dependencies = [
"extension_host",
"fork",
"fs",
- "futures 0.3.31",
+ "futures 0.3.32",
"git",
"git2",
"git_hosting_providers",
@@ -14341,7 +14458,7 @@ dependencies = [
"editor",
"feature_flags",
"file_icons",
- "futures 0.3.31",
+ "futures 0.3.32",
"gpui",
"html_to_markdown",
"http_client",
@@ -14466,7 +14583,7 @@ version = "0.1.0"
dependencies = [
"anyhow",
"bytes 1.11.1",
- "futures 0.3.31",
+ "futures 0.3.32",
"gpui_util",
"http_client",
"http_client_tls",
@@ -14638,7 +14755,7 @@ dependencies = [
"async-tungstenite",
"base64 0.22.1",
"collections",
- "futures 0.3.31",
+ "futures 0.3.32",
"gpui",
"parking_lot",
"proto",
@@ -14731,7 +14848,7 @@ dependencies = [
"chrono",
"data-encoding",
"dirs 6.0.0",
- "futures 0.3.31",
+ "futures 0.3.32",
"glob",
"jupyter-protocol",
"serde",
@@ -15105,7 +15222,7 @@ dependencies = [
"backtrace",
"chrono",
"flume",
- "futures 0.3.31",
+ "futures 0.3.32",
"parking_lot",
"rand 0.9.2",
"web-time",
@@ -15333,7 +15450,7 @@ dependencies = [
"collections",
"editor",
"fs",
- "futures 0.3.31",
+ "futures 0.3.32",
"gpui",
"itertools 0.14.0",
"language",
@@ -15372,6 +15489,15 @@ dependencies = [
"zeroize",
]
+[[package]]
+name = "secrecy"
+version = "0.10.3"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "e891af845473308773346dc847b2c23ee78fe442e0472ac50e22a18a93d3ae5a"
+dependencies = [
+ "zeroize",
+]
+
[[package]]
name = "security-framework"
version = "2.11.1"
@@ -15612,7 +15738,7 @@ dependencies = [
"collections",
"ec4rs",
"fs",
- "futures 0.3.31",
+ "futures 0.3.32",
"gpui",
"indoc",
"inventory",
@@ -15716,7 +15842,7 @@ dependencies = [
"editor",
"feature_flags",
"fs",
- "futures 0.3.31",
+ "futures 0.3.32",
"fuzzy",
"gpui",
"heck 0.5.0",
@@ -15747,6 +15873,7 @@ dependencies = [
"util",
"workspace",
"zed_actions",
+ "zed_credentials_provider",
]
[[package]]
@@ -15862,7 +15989,6 @@ dependencies = [
"agent_ui",
"anyhow",
"chrono",
- "collections",
"editor",
"feature_flags",
"fs",
@@ -16076,6 +16202,27 @@ version = "0.3.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "0f7a918bd2a9951d18ee6e48f076843e8e73a9a5d22cf05bcd4b7a81bdd04e17"
+[[package]]
+name = "snafu"
+version = "0.8.9"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "6e84b3f4eacbf3a1ce05eac6763b4d629d60cbc94d632e4092c54ade71f1e1a2"
+dependencies = [
+ "snafu-derive",
+]
+
+[[package]]
+name = "snafu-derive"
+version = "0.8.9"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "c1c97747dbf44bb1ca44a561ece23508e99cb592e862f22222dcf42f51d1e451"
+dependencies = [
+ "heck 0.5.0",
+ "proc-macro2",
+ "quote",
+ "syn 2.0.117",
+]
+
[[package]]
name = "snippet"
version = "0.1.0"
@@ -16092,7 +16239,7 @@ dependencies = [
"collections",
"extension",
"fs",
- "futures 0.3.31",
+ "futures 0.3.32",
"gpui",
"indoc",
"parking_lot",
@@ -16204,7 +16351,7 @@ version = "0.1.0"
dependencies = [
"anyhow",
"collections",
- "futures 0.3.31",
+ "futures 0.3.32",
"indoc",
"libsqlite3-sys",
"log",
@@ -17250,7 +17397,7 @@ version = "0.1.0"
dependencies = [
"anyhow",
"collections",
- "futures 0.3.31",
+ "futures 0.3.32",
"gpui",
"hex",
"log",
@@ -17297,7 +17444,7 @@ dependencies = [
name = "telemetry"
version = "0.1.0"
dependencies = [
- "futures 0.3.31",
+ "futures 0.3.32",
"serde",
"serde_json",
"telemetry_events",
@@ -17352,7 +17499,7 @@ dependencies = [
"alacritty_terminal",
"anyhow",
"collections",
- "futures 0.3.31",
+ "futures 0.3.32",
"gpui",
"itertools 0.14.0",
"libc",
@@ -17398,7 +17545,7 @@ dependencies = [
"db",
"dirs 4.0.0",
"editor",
- "futures 0.3.31",
+ "futures 0.3.32",
"gpui",
"itertools 0.14.0",
"language",
@@ -18034,7 +18181,7 @@ dependencies = [
"anyhow",
"convert_case 0.8.0",
"editor",
- "futures 0.3.31",
+ "futures 0.3.32",
"fuzzy",
"gpui",
"language",
@@ -18080,8 +18227,10 @@ dependencies = [
"pin-project-lite",
"sync_wrapper 1.0.2",
"tokio",
+ "tokio-util",
"tower-layer",
"tower-service",
+ "tracing",
]
[[package]]
@@ -18119,6 +18268,7 @@ dependencies = [
"tower 0.5.2",
"tower-layer",
"tower-service",
+ "tracing",
]
[[package]]
@@ -18910,7 +19060,7 @@ dependencies = [
"command-fds",
"dirs 4.0.0",
"dunce",
- "futures 0.3.31",
+ "futures 0.3.32",
"futures-lite 1.13.0",
"git2",
"globset",
@@ -19067,7 +19217,7 @@ dependencies = [
"db",
"editor",
"env_logger 0.11.8",
- "futures 0.3.31",
+ "futures 0.3.32",
"fuzzy",
"git_ui",
"gpui",
@@ -19431,7 +19581,7 @@ version = "0.3.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "b7516db7f32decdadb1c3b8deb1b7d78b9df7606c5cc2f6241737c2ab3a0258e"
dependencies = [
- "futures 0.3.31",
+ "futures 0.3.32",
"js-sys",
"wasm-bindgen",
"web-sys",
@@ -19787,7 +19937,7 @@ dependencies = [
"cap-std",
"cap-time-ext",
"fs-set-times",
- "futures 0.3.31",
+ "futures 0.3.32",
"io-extras",
"io-lifetimes",
"rustix 1.1.2",
@@ -19811,7 +19961,7 @@ dependencies = [
"anyhow",
"async-trait",
"bytes 1.11.1",
- "futures 0.3.31",
+ "futures 0.3.32",
"wasmtime",
]
@@ -19829,7 +19979,7 @@ name = "watch"
version = "0.1.0"
dependencies = [
"ctor",
- "futures 0.3.31",
+ "futures 0.3.32",
"gpui",
"parking_lot",
"zlog",
@@ -19965,6 +20115,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "5a6580f308b1fad9207618087a65c04e7a10bc77e02c8e84e9b00dd4b12fa0bb"
dependencies = [
"js-sys",
+ "serde",
"wasm-bindgen",
]
@@ -19999,7 +20150,7 @@ dependencies = [
"client",
"cloud_api_types",
"cloud_llm_client",
- "futures 0.3.31",
+ "futures 0.3.32",
"gpui",
"http_client",
"language_model",
@@ -21193,7 +21344,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "c4db52a11d4dfb0a59f194c064055794ee6564eb1ced88c25da2cf76e50c5621"
dependencies = [
"bitflags 2.10.0",
- "futures 0.3.31",
+ "futures 0.3.32",
"once_cell",
]
@@ -21444,7 +21595,7 @@ dependencies = [
"db",
"feature_flags",
"fs",
- "futures 0.3.31",
+ "futures 0.3.32",
"git",
"gpui",
"http_client",
@@ -21492,7 +21643,7 @@ dependencies = [
"collections",
"encoding_rs",
"fs",
- "futures 0.3.31",
+ "futures 0.3.32",
"fuzzy",
"git",
"gpui",
@@ -21702,9 +21853,10 @@ dependencies = [
"annotate-snippets",
"anyhow",
"backtrace",
- "cargo_metadata",
+ "cargo_metadata 0.19.2",
"cargo_toml",
"clap",
+ "compliance",
"gh-workflow",
"indexmap",
"indoc",
@@ -21714,6 +21866,7 @@ dependencies = [
"serde_json",
"serde_yaml",
"strum 0.27.2",
+ "tokio",
"toml 0.8.23",
"toml_edit 0.22.27",
]
@@ -21744,7 +21897,7 @@ dependencies = [
"base64 0.22.1",
"bytes 1.11.1",
"flate2",
- "futures 0.3.31",
+ "futures 0.3.32",
"http-body-util",
"hyper 1.7.0",
"hyper-util",
@@ -21949,7 +22102,7 @@ dependencies = [
"feedback",
"file_finder",
"fs",
- "futures 0.3.31",
+ "futures 0.3.32",
"git",
"git_graph",
"git_hosting_providers",
@@ -22181,10 +22334,24 @@ dependencies = [
]
[[package]]
-name = "zed_env_vars"
+name = "zed_credentials_provider"
version = "0.1.0"
dependencies = [
+ "anyhow",
+ "credentials_provider",
+ "futures 0.3.32",
"gpui",
+ "paths",
+ "release_channel",
+ "serde",
+ "serde_json",
+]
+
+[[package]]
+name = "zed_env_vars"
+version = "0.1.0"
+dependencies = [
+ "env_var",
]
[[package]]
@@ -22220,7 +22387,7 @@ dependencies = [
[[package]]
name = "zed_glsl"
-version = "0.2.2"
+version = "0.2.3"
dependencies = [
"zed_extension_api 0.1.0",
]
@@ -22234,7 +22401,7 @@ dependencies = [
[[package]]
name = "zed_proto"
-version = "0.3.1"
+version = "0.3.2"
dependencies = [
"zed_extension_api 0.7.0",
]
@@ -22325,7 +22492,7 @@ dependencies = [
"asynchronous-codec",
"bytes 1.11.1",
"crossbeam-queue",
- "futures 0.3.31",
+ "futures 0.3.32",
"log",
"num-traits",
"once_cell",
@@ -22375,6 +22542,7 @@ name = "zeta_prompt"
version = "0.1.0"
dependencies = [
"anyhow",
+ "imara-diff",
"indoc",
"serde",
"strum 0.27.2",
diff --git a/Cargo.toml b/Cargo.toml
index 3a393237ab9f5a5a8cd4b02517f6d22382ff51ff..5cb5b991b645ec1b78b16f48493c7c8dc1426344 100644
--- a/Cargo.toml
+++ b/Cargo.toml
@@ -61,6 +61,7 @@ members = [
"crates/edit_prediction_ui",
"crates/editor",
"crates/encoding_selector",
+ "crates/env_var",
"crates/etw_tracing",
"crates/eval_cli",
"crates/eval_utils",
@@ -220,6 +221,7 @@ members = [
"crates/x_ai",
"crates/zed",
"crates/zed_actions",
+ "crates/zed_credentials_provider",
"crates/zed_env_vars",
"crates/zeta_prompt",
"crates/zlog",
@@ -240,6 +242,7 @@ members = [
# Tooling
#
+ "tooling/compliance",
"tooling/perf",
"tooling/xtask",
]
@@ -287,6 +290,7 @@ collab_ui = { path = "crates/collab_ui" }
collections = { path = "crates/collections", version = "0.1.0" }
command_palette = { path = "crates/command_palette" }
command_palette_hooks = { path = "crates/command_palette_hooks" }
+compliance = { path = "tooling/compliance" }
component = { path = "crates/component" }
component_preview = { path = "crates/component_preview" }
context_server = { path = "crates/context_server" }
@@ -309,6 +313,7 @@ dev_container = { path = "crates/dev_container" }
diagnostics = { path = "crates/diagnostics" }
editor = { path = "crates/editor" }
encoding_selector = { path = "crates/encoding_selector" }
+env_var = { path = "crates/env_var" }
etw_tracing = { path = "crates/etw_tracing" }
eval_utils = { path = "crates/eval_utils" }
extension = { path = "crates/extension" }
@@ -465,6 +470,7 @@ worktree = { path = "crates/worktree" }
x_ai = { path = "crates/x_ai" }
zed = { path = "crates/zed" }
zed_actions = { path = "crates/zed_actions" }
+zed_credentials_provider = { path = "crates/zed_credentials_provider" }
zed_env_vars = { path = "crates/zed_env_vars" }
edit_prediction = { path = "crates/edit_prediction" }
zeta_prompt = { path = "crates/zeta_prompt" }
@@ -543,6 +549,7 @@ derive_more = { version = "2.1.1", features = [
"add_assign",
"deref",
"deref_mut",
+ "display",
"from_str",
"mul",
"mul_assign",
@@ -592,7 +599,7 @@ linkify = "0.10.0"
libwebrtc = "0.3.26"
livekit = { version = "0.7.32", features = ["tokio", "rustls-tls-native-roots"] }
log = { version = "0.4.16", features = ["kv_unstable_serde", "serde"] }
-lsp-types = { git = "https://github.com/zed-industries/lsp-types", rev = "a4f410987660bf560d1e617cb78117c6b6b9f599" }
+lsp-types = { git = "https://github.com/zed-industries/lsp-types", rev = "c7396459fefc7886b4adfa3b596832405ae1e880" }
mach2 = "0.5"
markup5ever_rcdom = "0.3.0"
metal = "0.33"
diff --git a/assets/icons/diff_split.svg b/assets/icons/diff_split.svg
index de2056466f7ef1081ee00dabb8b4d5baa8fc9217..dcafeb8df5c28bcac1f1fe8cf5783eebd8d8cd8a 100644
--- a/assets/icons/diff_split.svg
+++ b/assets/icons/diff_split.svg
@@ -1,5 +1,4 @@
diff --git a/assets/icons/diff_split_auto.svg b/assets/icons/diff_split_auto.svg
new file mode 100644
index 0000000000000000000000000000000000000000..f9dd7076be75aaf3e90286140a60deece5016114
--- /dev/null
+++ b/assets/icons/diff_split_auto.svg
@@ -0,0 +1,7 @@
+
diff --git a/assets/icons/diff_unified.svg b/assets/icons/diff_unified.svg
index b2d3895ae5466454e9cefc4e77e3c3f2a19cde8c..28735c16f682159b6b0a099176d6fc3b75cd248e 100644
--- a/assets/icons/diff_unified.svg
+++ b/assets/icons/diff_unified.svg
@@ -1,4 +1,4 @@
diff --git a/assets/keymaps/default-linux.json b/assets/keymaps/default-linux.json
index 523a961d6964e2c6e08d03b75a3e1eb1890fc586..5ecca68e0404b400af2c285dc51df0a65d6fe07a 100644
--- a/assets/keymaps/default-linux.json
+++ b/assets/keymaps/default-linux.json
@@ -284,12 +284,36 @@
"context": "AcpThread",
"bindings": {
"ctrl--": "pane::GoBack",
+ "pageup": "agent::ScrollOutputPageUp",
+ "pagedown": "agent::ScrollOutputPageDown",
+ "home": "agent::ScrollOutputToTop",
+ "end": "agent::ScrollOutputToBottom",
+ "up": "agent::ScrollOutputLineUp",
+ "down": "agent::ScrollOutputLineDown",
+ "shift-pageup": "agent::ScrollOutputToPreviousMessage",
+ "shift-pagedown": "agent::ScrollOutputToNextMessage",
+ "ctrl-alt-pageup": "agent::ScrollOutputPageUp",
+ "ctrl-alt-pagedown": "agent::ScrollOutputPageDown",
+ "ctrl-alt-home": "agent::ScrollOutputToTop",
+ "ctrl-alt-end": "agent::ScrollOutputToBottom",
+ "ctrl-alt-up": "agent::ScrollOutputLineUp",
+ "ctrl-alt-down": "agent::ScrollOutputLineDown",
+ "ctrl-alt-shift-pageup": "agent::ScrollOutputToPreviousMessage",
+ "ctrl-alt-shift-pagedown": "agent::ScrollOutputToNextMessage",
},
},
{
"context": "AcpThread > Editor",
"use_key_equivalents": true,
"bindings": {
+ "ctrl-alt-pageup": "agent::ScrollOutputPageUp",
+ "ctrl-alt-pagedown": "agent::ScrollOutputPageDown",
+ "ctrl-alt-home": "agent::ScrollOutputToTop",
+ "ctrl-alt-end": "agent::ScrollOutputToBottom",
+ "ctrl-alt-up": "agent::ScrollOutputLineUp",
+ "ctrl-alt-down": "agent::ScrollOutputLineDown",
+ "ctrl-alt-shift-pageup": "agent::ScrollOutputToPreviousMessage",
+ "ctrl-alt-shift-pagedown": "agent::ScrollOutputToNextMessage",
"ctrl-shift-r": "agent::OpenAgentDiff",
"ctrl-shift-d": "git::Diff",
"shift-alt-y": "agent::KeepAll",
@@ -574,6 +598,7 @@
// Change the default action on `menu::Confirm` by setting the parameter
// "alt-ctrl-o": ["projects::OpenRecent", { "create_new_window": true }],
"alt-ctrl-o": ["projects::OpenRecent", { "create_new_window": false }],
+ "ctrl-r": ["projects::OpenRecent", { "create_new_window": false }],
"alt-shift-open": ["projects::OpenRemote", { "from_existing_connection": false, "create_new_window": false }],
// Change to open path modal for existing remote connection by setting the parameter
// "alt-ctrl-shift-o": "["projects::OpenRemote", { "from_existing_connection": true }]",
@@ -1123,6 +1148,8 @@
"bindings": {
"ctrl-k": "recent_projects::ToggleActionsMenu",
"ctrl-shift-a": "workspace::AddFolderToProject",
+ "shift-backspace": "recent_projects::RemoveSelected",
+ "ctrl-shift-enter": "recent_projects::AddToWorkspace",
},
},
{
@@ -1249,6 +1276,8 @@
"alt-down": "markdown::ScrollDownByItem",
"ctrl-home": "markdown::ScrollToTop",
"ctrl-end": "markdown::ScrollToBottom",
+ "find": "buffer_search::Deploy",
+ "ctrl-f": "buffer_search::Deploy",
},
},
{
diff --git a/assets/keymaps/default-macos.json b/assets/keymaps/default-macos.json
index 9ca71aa9be3a99b1b52ab8490a6fe841956ecf50..c74b5900001a2c798076783b2741aba84ffc4b15 100644
--- a/assets/keymaps/default-macos.json
+++ b/assets/keymaps/default-macos.json
@@ -327,12 +327,36 @@
"context": "AcpThread",
"bindings": {
"ctrl--": "pane::GoBack",
+ "pageup": "agent::ScrollOutputPageUp",
+ "pagedown": "agent::ScrollOutputPageDown",
+ "home": "agent::ScrollOutputToTop",
+ "end": "agent::ScrollOutputToBottom",
+ "up": "agent::ScrollOutputLineUp",
+ "down": "agent::ScrollOutputLineDown",
+ "shift-pageup": "agent::ScrollOutputToPreviousMessage",
+ "shift-pagedown": "agent::ScrollOutputToNextMessage",
+ "ctrl-pageup": "agent::ScrollOutputPageUp",
+ "ctrl-pagedown": "agent::ScrollOutputPageDown",
+ "ctrl-home": "agent::ScrollOutputToTop",
+ "ctrl-end": "agent::ScrollOutputToBottom",
+ "ctrl-alt-up": "agent::ScrollOutputLineUp",
+ "ctrl-alt-down": "agent::ScrollOutputLineDown",
+ "ctrl-alt-pageup": "agent::ScrollOutputToPreviousMessage",
+ "ctrl-alt-pagedown": "agent::ScrollOutputToNextMessage",
},
},
{
"context": "AcpThread > Editor",
"use_key_equivalents": true,
"bindings": {
+ "ctrl-pageup": "agent::ScrollOutputPageUp",
+ "ctrl-pagedown": "agent::ScrollOutputPageDown",
+ "ctrl-home": "agent::ScrollOutputToTop",
+ "ctrl-end": "agent::ScrollOutputToBottom",
+ "ctrl-alt-up": "agent::ScrollOutputLineUp",
+ "ctrl-alt-down": "agent::ScrollOutputLineDown",
+ "ctrl-alt-pageup": "agent::ScrollOutputToPreviousMessage",
+ "ctrl-alt-pagedown": "agent::ScrollOutputToNextMessage",
"shift-ctrl-r": "agent::OpenAgentDiff",
"shift-ctrl-d": "git::Diff",
"shift-alt-y": "agent::KeepAll",
@@ -644,6 +668,7 @@
// Change the default action on `menu::Confirm` by setting the parameter
// "alt-cmd-o": ["projects::OpenRecent", {"create_new_window": true }],
"alt-cmd-o": ["projects::OpenRecent", { "create_new_window": false }],
+ "ctrl-r": ["projects::OpenRecent", { "create_new_window": false }],
"ctrl-cmd-o": ["projects::OpenRemote", { "from_existing_connection": false, "create_new_window": false }],
"ctrl-cmd-shift-o": ["projects::OpenRemote", { "from_existing_connection": true, "create_new_window": false }],
"cmd-ctrl-b": "branches::OpenRecent",
@@ -1188,6 +1213,8 @@
"bindings": {
"cmd-k": "recent_projects::ToggleActionsMenu",
"cmd-shift-a": "workspace::AddFolderToProject",
+ "shift-backspace": "recent_projects::RemoveSelected",
+ "cmd-shift-enter": "recent_projects::AddToWorkspace",
},
},
{
@@ -1349,6 +1376,7 @@
"alt-down": "markdown::ScrollDownByItem",
"cmd-up": "markdown::ScrollToTop",
"cmd-down": "markdown::ScrollToBottom",
+ "cmd-f": "buffer_search::Deploy",
},
},
{
diff --git a/assets/keymaps/default-windows.json b/assets/keymaps/default-windows.json
index 1883d0df0b3ff44ad8dceefb997198cb203a9b8d..a9eb3933423ff60fe60ac391b12773ce7146fb0d 100644
--- a/assets/keymaps/default-windows.json
+++ b/assets/keymaps/default-windows.json
@@ -285,12 +285,36 @@
"context": "AcpThread",
"bindings": {
"ctrl--": "pane::GoBack",
+ "pageup": "agent::ScrollOutputPageUp",
+ "pagedown": "agent::ScrollOutputPageDown",
+ "home": "agent::ScrollOutputToTop",
+ "end": "agent::ScrollOutputToBottom",
+ "up": "agent::ScrollOutputLineUp",
+ "down": "agent::ScrollOutputLineDown",
+ "shift-pageup": "agent::ScrollOutputToPreviousMessage",
+ "shift-pagedown": "agent::ScrollOutputToNextMessage",
+ "ctrl-alt-pageup": "agent::ScrollOutputPageUp",
+ "ctrl-alt-pagedown": "agent::ScrollOutputPageDown",
+ "ctrl-alt-home": "agent::ScrollOutputToTop",
+ "ctrl-alt-end": "agent::ScrollOutputToBottom",
+ "ctrl-alt-up": "agent::ScrollOutputLineUp",
+ "ctrl-alt-down": "agent::ScrollOutputLineDown",
+ "ctrl-alt-shift-pageup": "agent::ScrollOutputToPreviousMessage",
+ "ctrl-alt-shift-pagedown": "agent::ScrollOutputToNextMessage",
},
},
{
"context": "AcpThread > Editor",
"use_key_equivalents": true,
"bindings": {
+ "ctrl-alt-pageup": "agent::ScrollOutputPageUp",
+ "ctrl-alt-pagedown": "agent::ScrollOutputPageDown",
+ "ctrl-alt-home": "agent::ScrollOutputToTop",
+ "ctrl-alt-end": "agent::ScrollOutputToBottom",
+ "ctrl-alt-up": "agent::ScrollOutputLineUp",
+ "ctrl-alt-down": "agent::ScrollOutputLineDown",
+ "ctrl-alt-shift-pageup": "agent::ScrollOutputToPreviousMessage",
+ "ctrl-alt-shift-pagedown": "agent::ScrollOutputToNextMessage",
"ctrl-shift-r": "agent::OpenAgentDiff",
"ctrl-shift-d": "git::Diff",
"shift-alt-y": "agent::KeepAll",
@@ -1134,6 +1158,8 @@
"bindings": {
"ctrl-k": "recent_projects::ToggleActionsMenu",
"ctrl-shift-a": "workspace::AddFolderToProject",
+ "shift-backspace": "recent_projects::RemoveSelected",
+ "ctrl-shift-enter": "recent_projects::AddToWorkspace",
},
},
{
@@ -1274,6 +1300,8 @@
"alt-down": "markdown::ScrollDownByItem",
"ctrl-home": "markdown::ScrollToTop",
"ctrl-end": "markdown::ScrollToBottom",
+ "find": "buffer_search::Deploy",
+ "ctrl-f": "buffer_search::Deploy",
},
},
{
diff --git a/assets/keymaps/vim.json b/assets/keymaps/vim.json
index 1a7e7bf77248b6f863d4a6dbc1e268b4c5ae3576..220b44ff537ffa791b23c0c5b7d86b6768d74dc2 100644
--- a/assets/keymaps/vim.json
+++ b/assets/keymaps/vim.json
@@ -1096,6 +1096,7 @@
"ctrl-e": "markdown::ScrollDown",
"g g": "markdown::ScrollToTop",
"shift-g": "markdown::ScrollToBottom",
+ "/": "buffer_search::Deploy",
},
},
{
diff --git a/assets/settings/default.json b/assets/settings/default.json
index 2e0ddc2da70af5516d14a2fa8418a759bec62eb1..63e906e3b11206fc458f8d7353f3ecba0abeb825 100644
--- a/assets/settings/default.json
+++ b/assets/settings/default.json
@@ -225,6 +225,11 @@
// 3. Hide on both typing and cursor movement:
// "on_typing_and_movement"
"hide_mouse": "on_typing_and_movement",
+ // Determines whether the focused panel follows the mouse location.
+ "focus_follows_mouse": {
+ "enabled": false,
+ "debounce_ms": 250,
+ },
// Determines how snippets are sorted relative to other completion items.
//
// 1. Place snippets at the top of the completion list:
@@ -1102,11 +1107,14 @@
// "all_screens" - Show these notifications on all screens
// "never" - Never show these notifications
"notify_when_agent_waiting": "primary_screen",
- // Whether to play a sound when the agent has either completed
+ // When to play a sound when the agent has either completed
// its response, or needs user input.
-
- // Default: false
- "play_sound_when_agent_done": false,
+ // "never" - Never play the sound
+ // "when_hidden" - Only play the sound when the agent panel is not visible
+ // "always" - Always play the sound
+ //
+ // Default: never
+ "play_sound_when_agent_done": "never",
// Whether to have edit cards in the agent panel expanded, showing a preview of the full diff.
//
// Default: true
@@ -1136,6 +1144,11 @@
//
// Default: false
"show_turn_stats": false,
+ // Whether to show the merge conflict indicator in the status bar
+ // that offers to resolve conflicts using the agent.
+ //
+ // Default: true
+ "show_merge_conflict_indicator": true,
},
// Whether the screen sharing icon is shown in the os status bar.
"show_call_status_icon": true,
@@ -2404,6 +2417,7 @@
"toggle_relative_line_numbers": false,
"use_system_clipboard": "always",
"use_smartcase_find": false,
+ "use_regex_search": true,
"gdefault": false,
"highlight_on_yank_duration": 200,
"custom_digraphs": {},
@@ -2529,21 +2543,31 @@
"format_dap_log_messages": true,
"button": true,
},
- // Configures any number of settings profiles that are temporarily applied on
- // top of your existing user settings when selected from
- // `settings profile selector: toggle`.
+ // Configures any number of settings profiles that are temporarily applied
+ // when selected from `settings profile selector: toggle`.
+ //
+ // Each profile has an optional `base` ("user" or "default") and a `settings`
+ // object. When `base` is "user" (the default), the profile applies on top of
+ // your user settings. When `base` is "default", user settings are ignored and
+ // the profile applies on top of Zed's defaults.
+ //
// Examples:
// "profiles": {
// "Presenting": {
- // "agent_ui_font_size": 20.0,
- // "buffer_font_size": 20.0,
- // "theme": "One Light",
- // "ui_font_size": 20.0
+ // "base": "default",
+ // "settings": {
+ // "agent_ui_font_size": 20.0,
+ // "buffer_font_size": 20.0,
+ // "theme": "One Light",
+ // "ui_font_size": 20.0
+ // }
// },
// "Python (ty)": {
- // "languages": {
- // "Python": {
- // "language_servers": ["ty"]
+ // "settings": {
+ // "languages": {
+ // "Python": {
+ // "language_servers": ["ty"]
+ // }
// }
// }
// }
diff --git a/assets/settings/initial_tasks.json b/assets/settings/initial_tasks.json
index 0d6f4471320e443f3c4a483f53f6901c76e7dc72..bb6c9c04ae14db8f2d01adabd8d1494caa7d7407 100644
--- a/assets/settings/initial_tasks.json
+++ b/assets/settings/initial_tasks.json
@@ -50,9 +50,9 @@
"show_command": true,
// Which edited buffers to save before running the task:
// * `all` — save all edited buffers
- // * `current` — save current buffer only
+ // * `current` — save currently active buffer only
// * `none` — don't save any buffers
- "save": "all",
+ "save": "none",
// Represents the tags for inline runnable indicators, or spawning multiple tasks at once.
// "tags": []
},
diff --git a/assets/themes/ayu/ayu.json b/assets/themes/ayu/ayu.json
index 3450e35bf62d780bdaf0cff2c6bc9f8bdfea7c1e..f27566c4f72cac3938a752c64d95d0500c595306 100644
--- a/assets/themes/ayu/ayu.json
+++ b/assets/themes/ayu/ayu.json
@@ -283,7 +283,7 @@
"font_weight": null
},
"preproc": {
- "color": "#bfbdb6ff",
+ "color": "#ff8f3fff",
"font_style": null,
"font_weight": null
},
@@ -391,6 +391,16 @@
"color": "#5ac1feff",
"font_style": null,
"font_weight": null
+ },
+ "diff.plus": {
+ "color": "#aad94cff",
+ "font_style": null,
+ "font_weight": null
+ },
+ "diff.minus": {
+ "color": "#f07178ff",
+ "font_style": null,
+ "font_weight": null
}
}
}
@@ -675,7 +685,7 @@
"font_weight": null
},
"preproc": {
- "color": "#5c6166ff",
+ "color": "#fa8d3eff",
"font_style": null,
"font_weight": null
},
@@ -783,6 +793,16 @@
"color": "#3b9ee5ff",
"font_style": null,
"font_weight": null
+ },
+ "diff.plus": {
+ "color": "#6cbf43ff",
+ "font_style": null,
+ "font_weight": null
+ },
+ "diff.minus": {
+ "color": "#ff6666ff",
+ "font_style": null,
+ "font_weight": null
}
}
}
@@ -1067,7 +1087,7 @@
"font_weight": null
},
"preproc": {
- "color": "#cccac2ff",
+ "color": "#ffad65ff",
"font_style": null,
"font_weight": null
},
@@ -1175,6 +1195,16 @@
"color": "#72cffeff",
"font_style": null,
"font_weight": null
+ },
+ "diff.plus": {
+ "color": "#aad94cff",
+ "font_style": null,
+ "font_weight": null
+ },
+ "diff.minus": {
+ "color": "#f07178ff",
+ "font_style": null,
+ "font_weight": null
}
}
}
diff --git a/assets/themes/gruvbox/gruvbox.json b/assets/themes/gruvbox/gruvbox.json
index 16ae188712f7a800ab4fb8a81a2d24cac99da56b..4330df54fccae55e7ca077c0da9a891ee71ebe3a 100644
--- a/assets/themes/gruvbox/gruvbox.json
+++ b/assets/themes/gruvbox/gruvbox.json
@@ -293,7 +293,7 @@
"font_weight": null
},
"preproc": {
- "color": "#fbf1c7ff",
+ "color": "#fb4833ff",
"font_style": null,
"font_weight": null
},
@@ -406,6 +406,16 @@
"color": "#83a598ff",
"font_style": null,
"font_weight": null
+ },
+ "diff.plus": {
+ "color": "#b8bb26ff",
+ "font_style": null,
+ "font_weight": null
+ },
+ "diff.minus": {
+ "color": "#fb4934ff",
+ "font_style": null,
+ "font_weight": null
}
}
}
@@ -700,7 +710,7 @@
"font_weight": null
},
"preproc": {
- "color": "#fbf1c7ff",
+ "color": "#fb4833ff",
"font_style": null,
"font_weight": null
},
@@ -813,6 +823,16 @@
"color": "#83a598ff",
"font_style": null,
"font_weight": null
+ },
+ "diff.plus": {
+ "color": "#b8bb26ff",
+ "font_style": null,
+ "font_weight": null
+ },
+ "diff.minus": {
+ "color": "#fb4934ff",
+ "font_style": null,
+ "font_weight": null
}
}
}
@@ -1107,7 +1127,7 @@
"font_weight": null
},
"preproc": {
- "color": "#fbf1c7ff",
+ "color": "#fb4833ff",
"font_style": null,
"font_weight": null
},
@@ -1220,6 +1240,16 @@
"color": "#83a598ff",
"font_style": null,
"font_weight": null
+ },
+ "diff.plus": {
+ "color": "#b8bb26ff",
+ "font_style": null,
+ "font_weight": null
+ },
+ "diff.minus": {
+ "color": "#fb4934ff",
+ "font_style": null,
+ "font_weight": null
}
}
}
@@ -1514,7 +1544,7 @@
"font_weight": null
},
"preproc": {
- "color": "#282828ff",
+ "color": "#9d0006ff",
"font_style": null,
"font_weight": null
},
@@ -1627,6 +1657,16 @@
"color": "#0b6678ff",
"font_style": null,
"font_weight": null
+ },
+ "diff.plus": {
+ "color": "#79740eff",
+ "font_style": null,
+ "font_weight": null
+ },
+ "diff.minus": {
+ "color": "#9d0006ff",
+ "font_style": null,
+ "font_weight": null
}
}
}
@@ -1921,7 +1961,7 @@
"font_weight": null
},
"preproc": {
- "color": "#282828ff",
+ "color": "#9d0006ff",
"font_style": null,
"font_weight": null
},
@@ -2034,6 +2074,16 @@
"color": "#0b6678ff",
"font_style": null,
"font_weight": null
+ },
+ "diff.plus": {
+ "color": "#79740eff",
+ "font_style": null,
+ "font_weight": null
+ },
+ "diff.minus": {
+ "color": "#9d0006ff",
+ "font_style": null,
+ "font_weight": null
}
}
}
@@ -2328,7 +2378,7 @@
"font_weight": null
},
"preproc": {
- "color": "#282828ff",
+ "color": "#9d0006ff",
"font_style": null,
"font_weight": null
},
@@ -2441,6 +2491,16 @@
"color": "#0b6678ff",
"font_style": null,
"font_weight": null
+ },
+ "diff.plus": {
+ "color": "#79740eff",
+ "font_style": null,
+ "font_weight": null
+ },
+ "diff.minus": {
+ "color": "#9d0006ff",
+ "font_style": null,
+ "font_weight": null
}
}
}
diff --git a/assets/themes/one/one.json b/assets/themes/one/one.json
index 05af3f5cfeec7d4a24c4fe6d684fb21d04e2d81c..e60b6314b9595ac02bd6a43be4580ba9331ae769 100644
--- a/assets/themes/one/one.json
+++ b/assets/themes/one/one.json
@@ -290,7 +290,7 @@
"font_weight": null
},
"preproc": {
- "color": "#dce0e5ff",
+ "color": "#b477cfff",
"font_style": null,
"font_weight": null
},
@@ -403,6 +403,16 @@
"color": "#73ade9ff",
"font_style": null,
"font_weight": null
+ },
+ "diff.plus": {
+ "color": "#98c379ff",
+ "font_style": null,
+ "font_weight": null
+ },
+ "diff.minus": {
+ "color": "#e06c75ff",
+ "font_style": null,
+ "font_weight": null
}
}
}
@@ -692,7 +702,7 @@
"font_weight": null
},
"preproc": {
- "color": "#242529ff",
+ "color": "#a449abff",
"font_style": null,
"font_weight": null
},
@@ -805,6 +815,16 @@
"color": "#5b79e3ff",
"font_style": null,
"font_weight": null
+ },
+ "diff.plus": {
+ "color": "#50a14fff",
+ "font_style": null,
+ "font_weight": null
+ },
+ "diff.minus": {
+ "color": "#e45649ff",
+ "font_style": null,
+ "font_weight": null
}
}
}
diff --git a/crates/acp_thread/src/acp_thread.rs b/crates/acp_thread/src/acp_thread.rs
index 937592b8a94df00ca1c7565d43893b99693f8892..36c9fb40c4a573e09da05618a29c1898cced60ad 100644
--- a/crates/acp_thread/src/acp_thread.rs
+++ b/crates/acp_thread/src/acp_thread.rs
@@ -1032,6 +1032,7 @@ pub struct AcpThread {
connection: Rc,
token_usage: Option,
prompt_capabilities: acp::PromptCapabilities,
+ available_commands: Vec,
_observe_prompt_capabilities: Task>,
terminals: HashMap>,
pending_terminal_output: HashMap>>,
@@ -1220,6 +1221,7 @@ impl AcpThread {
session_id,
token_usage: None,
prompt_capabilities,
+ available_commands: Vec::new(),
_observe_prompt_capabilities: task,
terminals: HashMap::default(),
pending_terminal_output: HashMap::default(),
@@ -1239,6 +1241,10 @@ impl AcpThread {
self.prompt_capabilities.clone()
}
+ pub fn available_commands(&self) -> &[acp::AvailableCommand] {
+ &self.available_commands
+ }
+
pub fn draft_prompt(&self) -> Option<&[acp::ContentBlock]> {
self.draft_prompt.as_deref()
}
@@ -1419,7 +1425,10 @@ impl AcpThread {
acp::SessionUpdate::AvailableCommandsUpdate(acp::AvailableCommandsUpdate {
available_commands,
..
- }) => cx.emit(AcpThreadEvent::AvailableCommandsUpdated(available_commands)),
+ }) => {
+ self.available_commands = available_commands.clone();
+ cx.emit(AcpThreadEvent::AvailableCommandsUpdated(available_commands));
+ }
acp::SessionUpdate::CurrentModeUpdate(acp::CurrentModeUpdate {
current_mode_id,
..
@@ -2616,7 +2625,7 @@ impl AcpThread {
text_diff(old_text.as_str(), &content)
.into_iter()
.map(|(range, replacement)| {
- (snapshot.anchor_range_around(range), replacement)
+ (snapshot.anchor_range_inside(range), replacement)
})
.collect::>()
})
diff --git a/crates/acp_thread/src/diff.rs b/crates/acp_thread/src/diff.rs
index 08b1b9bdf24d1ff9980164c1af8b3e60bd2f3339..a6d3b86db7c980bb5e4e5a8cacee95abeaabc3f1 100644
--- a/crates/acp_thread/src/diff.rs
+++ b/crates/acp_thread/src/diff.rs
@@ -191,7 +191,7 @@ impl Diff {
}
pub fn has_revealed_range(&self, cx: &App) -> bool {
- self.multibuffer().read(cx).paths().next().is_some()
+ !self.multibuffer().read(cx).is_empty()
}
pub fn needs_update(&self, old_text: &str, new_text: &str, cx: &App) -> bool {
diff --git a/crates/acp_thread/src/mention.rs b/crates/acp_thread/src/mention.rs
index 753838d3b98ed60dc02c3d9383c28fe4f848a29e..28038ecbc04c59d1c5107872210056f11b413141 100644
--- a/crates/acp_thread/src/mention.rs
+++ b/crates/acp_thread/src/mention.rs
@@ -19,7 +19,9 @@ pub enum MentionUri {
File {
abs_path: PathBuf,
},
- PastedImage,
+ PastedImage {
+ name: String,
+ },
Directory {
abs_path: PathBuf,
},
@@ -155,7 +157,9 @@ impl MentionUri {
include_warnings,
})
} else if path.starts_with("/agent/pasted-image") {
- Ok(Self::PastedImage)
+ let name =
+ single_query_param(&url, "name")?.unwrap_or_else(|| "Image".to_string());
+ Ok(Self::PastedImage { name })
} else if path.starts_with("/agent/untitled-buffer") {
let fragment = url
.fragment()
@@ -227,7 +231,7 @@ impl MentionUri {
.unwrap_or_default()
.to_string_lossy()
.into_owned(),
- MentionUri::PastedImage => "Image".to_string(),
+ MentionUri::PastedImage { name } => name.clone(),
MentionUri::Symbol { name, .. } => name.clone(),
MentionUri::Thread { name, .. } => name.clone(),
MentionUri::Rule { name, .. } => name.clone(),
@@ -296,7 +300,7 @@ impl MentionUri {
MentionUri::File { abs_path } => {
FileIcons::get_icon(abs_path, cx).unwrap_or_else(|| IconName::File.path().into())
}
- MentionUri::PastedImage => IconName::Image.path().into(),
+ MentionUri::PastedImage { .. } => IconName::Image.path().into(),
MentionUri::Directory { abs_path } => FileIcons::get_folder_icon(false, abs_path, cx)
.unwrap_or_else(|| IconName::Folder.path().into()),
MentionUri::Symbol { .. } => IconName::Code.path().into(),
@@ -322,10 +326,18 @@ impl MentionUri {
url.set_path(&abs_path.to_string_lossy());
url
}
- MentionUri::PastedImage => Url::parse("zed:///agent/pasted-image").unwrap(),
+ MentionUri::PastedImage { name } => {
+ let mut url = Url::parse("zed:///agent/pasted-image").unwrap();
+ url.query_pairs_mut().append_pair("name", name);
+ url
+ }
MentionUri::Directory { abs_path } => {
let mut url = Url::parse("file:///").unwrap();
- url.set_path(&abs_path.to_string_lossy());
+ let mut path = abs_path.to_string_lossy().into_owned();
+ if !path.ends_with('/') && !path.ends_with('\\') {
+ path.push('/');
+ }
+ url.set_path(&path);
url
}
MentionUri::Symbol {
@@ -490,6 +502,21 @@ mod tests {
assert_eq!(uri.to_uri().to_string(), expected);
}
+ #[test]
+ fn test_directory_uri_round_trip_without_trailing_slash() {
+ let uri = MentionUri::Directory {
+ abs_path: PathBuf::from(path!("/path/to/dir")),
+ };
+ let serialized = uri.to_uri().to_string();
+ assert!(serialized.ends_with('/'), "directory URI must end with /");
+ let parsed = MentionUri::parse(&serialized, PathStyle::local()).unwrap();
+ assert!(
+ matches!(parsed, MentionUri::Directory { .. }),
+ "expected Directory variant, got {:?}",
+ parsed
+ );
+ }
+
#[test]
fn test_parse_symbol_uri() {
let symbol_uri = uri!("file:///path/to/file.rs?symbol=MySymbol#L10:20");
diff --git a/crates/action_log/src/action_log.rs b/crates/action_log/src/action_log.rs
index 3faf767c7020763eadc7db6c93af42f650a07434..1f17d38f7d2a2770350026f2f145a53723ef7481 100644
--- a/crates/action_log/src/action_log.rs
+++ b/crates/action_log/src/action_log.rs
@@ -738,6 +738,7 @@ impl ActionLog {
let task = if let Some(existing_file_content) = existing_file_content {
// Capture the agent's content before restoring existing file content
let agent_content = buffer.read(cx).text();
+ let buffer_id = buffer.read(cx).remote_id();
buffer.update(cx, |buffer, cx| {
buffer.start_transaction();
@@ -750,7 +751,10 @@ impl ActionLog {
undo_info = Some(PerBufferUndo {
buffer: buffer.downgrade(),
- edits_to_restore: vec![(Anchor::MIN..Anchor::MAX, agent_content)],
+ edits_to_restore: vec![(
+ Anchor::min_for_buffer(buffer_id)..Anchor::max_for_buffer(buffer_id),
+ agent_content,
+ )],
status: UndoBufferStatus::Created {
had_existing_content: true,
},
@@ -990,8 +994,8 @@ impl ActionLog {
let mut valid_edits = Vec::new();
for (anchor_range, text_to_restore) in per_buffer_undo.edits_to_restore {
- if anchor_range.start.buffer_id == Some(buffer.remote_id())
- && anchor_range.end.buffer_id == Some(buffer.remote_id())
+ if anchor_range.start.buffer_id == buffer.remote_id()
+ && anchor_range.end.buffer_id == buffer.remote_id()
{
valid_edits.push((anchor_range, text_to_restore));
}
diff --git a/crates/agent/src/edit_agent.rs b/crates/agent/src/edit_agent.rs
index 6e6cf9735a922695bf089bdcc78798fb086ad364..afaa124de066d92e5a1d1a1670f762017f086d01 100644
--- a/crates/agent/src/edit_agent.rs
+++ b/crates/agent/src/edit_agent.rs
@@ -374,13 +374,13 @@ impl EditAgent {
buffer.edit(edits.iter().cloned(), None, cx);
let max_edit_end = buffer
.summaries_for_anchors::(
- edits.iter().map(|(range, _)| &range.end),
+ edits.iter().map(|(range, _)| range.end),
)
.max()
.unwrap();
let min_edit_start = buffer
.summaries_for_anchors::(
- edits.iter().map(|(range, _)| &range.start),
+ edits.iter().map(|(range, _)| range.start),
)
.min()
.unwrap();
@@ -1519,7 +1519,7 @@ mod tests {
stream: &mut UnboundedReceiver,
) -> Vec {
let mut events = Vec::new();
- while let Ok(Some(event)) = stream.try_next() {
+ while let Ok(event) = stream.try_recv() {
events.push(event);
}
events
diff --git a/crates/agent/src/edit_agent/evals.rs b/crates/agent/src/edit_agent/evals.rs
index e7b67e37bf4a8b71664a78b99b757c6985794ec6..ba8b7ed867ea26bcdcdee7f8bf20390c2f9592b3 100644
--- a/crates/agent/src/edit_agent/evals.rs
+++ b/crates/agent/src/edit_agent/evals.rs
@@ -4,7 +4,7 @@ use crate::{
ListDirectoryTool, ListDirectoryToolInput, ReadFileTool, ReadFileToolInput,
};
use Role::*;
-use client::{Client, UserStore};
+use client::{Client, RefreshLlmTokenListener, UserStore};
use eval_utils::{EvalOutput, EvalOutputProcessor, OutcomeKind};
use fs::FakeFs;
use futures::{FutureExt, future::LocalBoxFuture};
@@ -1423,7 +1423,8 @@ impl EditAgentTest {
let client = Client::production(cx);
let user_store = cx.new(|cx| UserStore::new(client.clone(), cx));
settings::init(cx);
- language_model::init(user_store.clone(), client.clone(), cx);
+ language_model::init(cx);
+ RefreshLlmTokenListener::register(client.clone(), user_store.clone(), cx);
language_models::init(user_store, client.clone(), cx);
});
diff --git a/crates/agent/src/tests/edit_file_thread_test.rs b/crates/agent/src/tests/edit_file_thread_test.rs
index 3beb5cb0d51abc55fbf3cf0849ced248a9d1fa5c..b5ce6441e790e0b79b2798dfe0008cc74eec69b8 100644
--- a/crates/agent/src/tests/edit_file_thread_test.rs
+++ b/crates/agent/src/tests/edit_file_thread_test.rs
@@ -202,3 +202,214 @@ async fn test_edit_file_tool_in_thread_context(cx: &mut TestAppContext) {
);
});
}
+
+#[gpui::test]
+async fn test_streaming_edit_json_parse_error_does_not_cause_unsaved_changes(
+ cx: &mut TestAppContext,
+) {
+ super::init_test(cx);
+ super::always_allow_tools(cx);
+
+ // Enable the streaming edit file tool feature flag.
+ cx.update(|cx| {
+ cx.update_flags(true, vec!["streaming-edit-file-tool".to_string()]);
+ });
+
+ let fs = FakeFs::new(cx.executor());
+ fs.insert_tree(
+ path!("/project"),
+ json!({
+ "src": {
+ "main.rs": "fn main() {\n println!(\"Hello, world!\");\n}\n"
+ }
+ }),
+ )
+ .await;
+
+ let project = project::Project::test(fs.clone(), [path!("/project").as_ref()], cx).await;
+ let project_context = cx.new(|_cx| ProjectContext::default());
+ let context_server_store = project.read_with(cx, |project, _| project.context_server_store());
+ let context_server_registry =
+ cx.new(|cx| crate::ContextServerRegistry::new(context_server_store.clone(), cx));
+ let model = Arc::new(FakeLanguageModel::default());
+ model.as_fake().set_supports_streaming_tools(true);
+ let fake_model = model.as_fake();
+
+ let thread = cx.new(|cx| {
+ let mut thread = crate::Thread::new(
+ project.clone(),
+ project_context,
+ context_server_registry,
+ crate::Templates::new(),
+ Some(model.clone()),
+ cx,
+ );
+ let language_registry = project.read(cx).languages().clone();
+ thread.add_tool(crate::StreamingEditFileTool::new(
+ project.clone(),
+ cx.weak_entity(),
+ thread.action_log().clone(),
+ language_registry,
+ ));
+ thread
+ });
+
+ let _events = thread
+ .update(cx, |thread, cx| {
+ thread.send(
+ UserMessageId::new(),
+ ["Write new content to src/main.rs"],
+ cx,
+ )
+ })
+ .unwrap();
+ cx.run_until_parked();
+
+ let tool_use_id = "edit_1";
+ let partial_1 = LanguageModelToolUse {
+ id: tool_use_id.into(),
+ name: EditFileTool::NAME.into(),
+ raw_input: json!({
+ "display_description": "Rewrite main.rs",
+ "path": "project/src/main.rs",
+ "mode": "write"
+ })
+ .to_string(),
+ input: json!({
+ "display_description": "Rewrite main.rs",
+ "path": "project/src/main.rs",
+ "mode": "write"
+ }),
+ is_input_complete: false,
+ thought_signature: None,
+ };
+ fake_model.send_last_completion_stream_event(LanguageModelCompletionEvent::ToolUse(partial_1));
+ cx.run_until_parked();
+
+ let partial_2 = LanguageModelToolUse {
+ id: tool_use_id.into(),
+ name: EditFileTool::NAME.into(),
+ raw_input: json!({
+ "display_description": "Rewrite main.rs",
+ "path": "project/src/main.rs",
+ "mode": "write",
+ "content": "fn main() { /* rewritten */ }"
+ })
+ .to_string(),
+ input: json!({
+ "display_description": "Rewrite main.rs",
+ "path": "project/src/main.rs",
+ "mode": "write",
+ "content": "fn main() { /* rewritten */ }"
+ }),
+ is_input_complete: false,
+ thought_signature: None,
+ };
+ fake_model.send_last_completion_stream_event(LanguageModelCompletionEvent::ToolUse(partial_2));
+ cx.run_until_parked();
+
+ // Now send a json parse error. At this point we have started writing content to the buffer.
+ fake_model.send_last_completion_stream_event(
+ LanguageModelCompletionEvent::ToolUseJsonParseError {
+ id: tool_use_id.into(),
+ tool_name: EditFileTool::NAME.into(),
+ raw_input: r#"{"display_description":"Rewrite main.rs","path":"project/src/main.rs","mode":"write","content":"fn main() { /* rewritten "#.into(),
+ json_parse_error: "EOF while parsing a string at line 1 column 95".into(),
+ },
+ );
+ fake_model
+ .send_last_completion_stream_event(LanguageModelCompletionEvent::Stop(StopReason::ToolUse));
+ fake_model.end_last_completion_stream();
+ cx.run_until_parked();
+
+ // cx.executor().advance_clock(Duration::from_secs(5));
+ // cx.run_until_parked();
+
+ assert!(
+ !fake_model.pending_completions().is_empty(),
+ "Thread should have retried after the error"
+ );
+
+ // Respond with a new, well-formed, complete edit_file tool use.
+ let tool_use = LanguageModelToolUse {
+ id: "edit_2".into(),
+ name: EditFileTool::NAME.into(),
+ raw_input: json!({
+ "display_description": "Rewrite main.rs",
+ "path": "project/src/main.rs",
+ "mode": "write",
+ "content": "fn main() {\n println!(\"Hello, rewritten!\");\n}\n"
+ })
+ .to_string(),
+ input: json!({
+ "display_description": "Rewrite main.rs",
+ "path": "project/src/main.rs",
+ "mode": "write",
+ "content": "fn main() {\n println!(\"Hello, rewritten!\");\n}\n"
+ }),
+ is_input_complete: true,
+ thought_signature: None,
+ };
+ fake_model.send_last_completion_stream_event(LanguageModelCompletionEvent::ToolUse(tool_use));
+ fake_model
+ .send_last_completion_stream_event(LanguageModelCompletionEvent::Stop(StopReason::ToolUse));
+ fake_model.end_last_completion_stream();
+ cx.run_until_parked();
+
+ let pending_completions = fake_model.pending_completions();
+ assert!(
+ pending_completions.len() == 1,
+ "Expected only the follow-up completion containing the successful tool result"
+ );
+
+ let completion = pending_completions
+ .into_iter()
+ .last()
+ .expect("Expected a completion containing the tool result for edit_2");
+
+ let tool_result = completion
+ .messages
+ .iter()
+ .flat_map(|msg| &msg.content)
+ .find_map(|content| match content {
+ language_model::MessageContent::ToolResult(result)
+ if result.tool_use_id == language_model::LanguageModelToolUseId::from("edit_2") =>
+ {
+ Some(result)
+ }
+ _ => None,
+ })
+ .expect("Should have a tool result for edit_2");
+
+ // Ensure that the second tool call completed successfully and edits were applied.
+ assert!(
+ !tool_result.is_error,
+ "Tool result should succeed, got: {:?}",
+ tool_result
+ );
+ let content_text = match &tool_result.content {
+ language_model::LanguageModelToolResultContent::Text(t) => t.to_string(),
+ other => panic!("Expected text content, got: {:?}", other),
+ };
+ assert!(
+ !content_text.contains("file has been modified since you last read it"),
+ "Did not expect a stale last-read error, got: {content_text}"
+ );
+ assert!(
+ !content_text.contains("This file has unsaved changes"),
+ "Did not expect an unsaved-changes error, got: {content_text}"
+ );
+
+ let file_content = fs
+ .load(path!("/project/src/main.rs").as_ref())
+ .await
+ .expect("file should exist");
+ super::assert_eq!(
+ file_content,
+ "fn main() {\n println!(\"Hello, rewritten!\");\n}\n",
+ "The second edit should be applied and saved gracefully"
+ );
+
+ fake_model.end_last_completion_stream();
+ cx.run_until_parked();
+}
diff --git a/crates/agent/src/tests/mod.rs b/crates/agent/src/tests/mod.rs
index 036a6f1030c43b16d51f864a1d0176891e90b772..ff53136a0ded4bbc283fea30598d8d30e6e29709 100644
--- a/crates/agent/src/tests/mod.rs
+++ b/crates/agent/src/tests/mod.rs
@@ -6,7 +6,7 @@ use acp_thread::{
use agent_client_protocol::{self as acp};
use agent_settings::AgentProfileId;
use anyhow::Result;
-use client::{Client, UserStore};
+use client::{Client, RefreshLlmTokenListener, UserStore};
use collections::IndexMap;
use context_server::{ContextServer, ContextServerCommand, ContextServerId};
use feature_flags::FeatureFlagAppExt as _;
@@ -3253,7 +3253,8 @@ async fn test_agent_connection(cx: &mut TestAppContext) {
let clock = Arc::new(clock::FakeSystemClock::new());
let client = Client::new(clock, http_client, cx);
let user_store = cx.new(|cx| UserStore::new(client.clone(), cx));
- language_model::init(user_store.clone(), client.clone(), cx);
+ language_model::init(cx);
+ RefreshLlmTokenListener::register(client.clone(), user_store.clone(), cx);
language_models::init(user_store, client.clone(), cx);
LanguageModelRegistry::test(cx);
});
@@ -3902,6 +3903,117 @@ async fn test_streaming_tool_completes_when_llm_stream_ends_without_final_input(
});
}
+#[gpui::test]
+async fn test_streaming_tool_json_parse_error_is_forwarded_to_running_tool(
+ cx: &mut TestAppContext,
+) {
+ init_test(cx);
+ always_allow_tools(cx);
+
+ let ThreadTest { model, thread, .. } = setup(cx, TestModel::Fake).await;
+ let fake_model = model.as_fake();
+
+ thread.update(cx, |thread, _cx| {
+ thread.add_tool(StreamingJsonErrorContextTool);
+ });
+
+ let _events = thread
+ .update(cx, |thread, cx| {
+ thread.send(
+ UserMessageId::new(),
+ ["Use the streaming_json_error_context tool"],
+ cx,
+ )
+ })
+ .unwrap();
+ cx.run_until_parked();
+
+ let tool_use = LanguageModelToolUse {
+ id: "tool_1".into(),
+ name: StreamingJsonErrorContextTool::NAME.into(),
+ raw_input: r#"{"text": "partial"#.into(),
+ input: json!({"text": "partial"}),
+ is_input_complete: false,
+ thought_signature: None,
+ };
+ fake_model.send_last_completion_stream_event(LanguageModelCompletionEvent::ToolUse(tool_use));
+ cx.run_until_parked();
+
+ fake_model.send_last_completion_stream_event(
+ LanguageModelCompletionEvent::ToolUseJsonParseError {
+ id: "tool_1".into(),
+ tool_name: StreamingJsonErrorContextTool::NAME.into(),
+ raw_input: r#"{"text": "partial"#.into(),
+ json_parse_error: "EOF while parsing a string at line 1 column 17".into(),
+ },
+ );
+ fake_model
+ .send_last_completion_stream_event(LanguageModelCompletionEvent::Stop(StopReason::ToolUse));
+ fake_model.end_last_completion_stream();
+ cx.run_until_parked();
+
+ cx.executor().advance_clock(Duration::from_secs(5));
+ cx.run_until_parked();
+
+ let completion = fake_model
+ .pending_completions()
+ .pop()
+ .expect("No running turn");
+
+ let tool_results: Vec<_> = completion
+ .messages
+ .iter()
+ .flat_map(|message| &message.content)
+ .filter_map(|content| match content {
+ MessageContent::ToolResult(result)
+ if result.tool_use_id == language_model::LanguageModelToolUseId::from("tool_1") =>
+ {
+ Some(result)
+ }
+ _ => None,
+ })
+ .collect();
+
+ assert_eq!(
+ tool_results.len(),
+ 1,
+ "Expected exactly 1 tool result for tool_1, got {}: {:#?}",
+ tool_results.len(),
+ tool_results
+ );
+
+ let result = tool_results[0];
+ assert!(result.is_error);
+ let content_text = match &result.content {
+ language_model::LanguageModelToolResultContent::Text(text) => text.to_string(),
+ other => panic!("Expected text content, got {:?}", other),
+ };
+ assert!(
+ content_text.contains("Saw partial text 'partial' before invalid JSON"),
+ "Expected tool-enriched partial context, got: {content_text}"
+ );
+ assert!(
+ content_text
+ .contains("Error parsing input JSON: EOF while parsing a string at line 1 column 17"),
+ "Expected forwarded JSON parse error, got: {content_text}"
+ );
+ assert!(
+ !content_text.contains("tool input was not fully received"),
+ "Should not contain orphaned sender error, got: {content_text}"
+ );
+
+ fake_model.send_last_completion_stream_text_chunk("Done");
+ fake_model.end_last_completion_stream();
+ cx.run_until_parked();
+
+ thread.read_with(cx, |thread, _cx| {
+ assert!(
+ thread.is_turn_complete(),
+ "Thread should not be stuck; the turn should have completed",
+ );
+ });
+}
+
/// Filters out the stop events for asserting against in tests
fn stop_events(result_events: Vec>) -> Vec {
result_events
@@ -3958,6 +4070,7 @@ async fn setup(cx: &mut TestAppContext, model: TestModel) -> ThreadTest {
InfiniteTool::NAME: true,
CancellationAwareTool::NAME: true,
StreamingEchoTool::NAME: true,
+ StreamingJsonErrorContextTool::NAME: true,
StreamingFailingEchoTool::NAME: true,
TerminalTool::NAME: true,
UpdatePlanTool::NAME: true,
@@ -3982,7 +4095,8 @@ async fn setup(cx: &mut TestAppContext, model: TestModel) -> ThreadTest {
cx.set_http_client(Arc::new(http_client));
let client = Client::production(cx);
let user_store = cx.new(|cx| UserStore::new(client.clone(), cx));
- language_model::init(user_store.clone(), client.clone(), cx);
+ language_model::init(cx);
+ RefreshLlmTokenListener::register(client.clone(), user_store.clone(), cx);
language_models::init(user_store, client.clone(), cx);
}
};
@@ -6206,9 +6320,9 @@ async fn test_edit_file_tool_allow_rule_skips_confirmation(cx: &mut TestAppConte
cx.run_until_parked();
- let event = rx.try_next();
+ let event = rx.try_recv();
assert!(
- !matches!(event, Ok(Some(Ok(ThreadEvent::ToolCallAuthorization(_))))),
+ !matches!(event, Ok(Ok(ThreadEvent::ToolCallAuthorization(_)))),
"expected no authorization request for allowed .md file"
);
}
@@ -6350,9 +6464,9 @@ async fn test_fetch_tool_allow_rule_skips_confirmation(cx: &mut TestAppContext)
cx.run_until_parked();
- let event = rx.try_next();
+ let event = rx.try_recv();
assert!(
- !matches!(event, Ok(Some(Ok(ThreadEvent::ToolCallAuthorization(_))))),
+ !matches!(event, Ok(Ok(ThreadEvent::ToolCallAuthorization(_)))),
"expected no authorization request for allowed docs.rs URL"
);
}
diff --git a/crates/agent/src/tests/test_tools.rs b/crates/agent/src/tests/test_tools.rs
index f36549a6c42f9e810c7794d8ec683613b6ae6933..4744204fae1213d49af92339b8847e9d1f470125 100644
--- a/crates/agent/src/tests/test_tools.rs
+++ b/crates/agent/src/tests/test_tools.rs
@@ -56,13 +56,12 @@ impl AgentTool for StreamingEchoTool {
fn run(
self: Arc,
- mut input: ToolInput,
+ input: ToolInput,
_event_stream: ToolCallEventStream,
cx: &mut App,
) -> Task> {
let wait_until_complete_rx = self.wait_until_complete_rx.lock().unwrap().take();
cx.spawn(async move |_cx| {
- while input.recv_partial().await.is_some() {}
let input = input
.recv()
.await
@@ -75,6 +74,68 @@ impl AgentTool for StreamingEchoTool {
}
}
+#[derive(JsonSchema, Serialize, Deserialize)]
+pub struct StreamingJsonErrorContextToolInput {
+ /// The text to echo.
+ pub text: String,
+}
+
+pub struct StreamingJsonErrorContextTool;
+
+impl AgentTool for StreamingJsonErrorContextTool {
+ type Input = StreamingJsonErrorContextToolInput;
+ type Output = String;
+
+ const NAME: &'static str = "streaming_json_error_context";
+
+ fn supports_input_streaming() -> bool {
+ true
+ }
+
+ fn kind() -> acp::ToolKind {
+ acp::ToolKind::Other
+ }
+
+ fn initial_title(
+ &self,
+ _input: Result,
+ _cx: &mut App,
+ ) -> SharedString {
+ "Streaming JSON Error Context".into()
+ }
+
+ fn run(
+ self: Arc,
+ mut input: ToolInput,
+ _event_stream: ToolCallEventStream,
+ cx: &mut App,
+ ) -> Task> {
+ cx.spawn(async move |_cx| {
+ let mut last_partial_text = None;
+
+ loop {
+ match input.next().await {
+ Ok(ToolInputPayload::Partial(partial)) => {
+ if let Some(text) = partial.get("text").and_then(|value| value.as_str()) {
+ last_partial_text = Some(text.to_string());
+ }
+ }
+ Ok(ToolInputPayload::Full(input)) => return Ok(input.text),
+ Ok(ToolInputPayload::InvalidJson { error_message }) => {
+ let partial_text = last_partial_text.unwrap_or_default();
+ return Err(format!(
+ "Saw partial text '{partial_text}' before invalid JSON: {error_message}"
+ ));
+ }
+ Err(error) => {
+ return Err(format!("Failed to receive tool input: {error}"));
+ }
+ }
+ }
+ })
+ }
+}
+
/// A streaming tool that echoes its input, used to test streaming tool
/// lifecycle (e.g. partial delivery and cleanup when the LLM stream ends
/// before `is_input_complete`).
@@ -119,7 +180,7 @@ impl AgentTool for StreamingFailingEchoTool {
) -> Task> {
cx.spawn(async move |_cx| {
for _ in 0..self.receive_chunks_until_failure {
- let _ = input.recv_partial().await;
+ let _ = input.next().await;
}
Err("failed".into())
})
diff --git a/crates/agent/src/thread.rs b/crates/agent/src/thread.rs
index b61df1b8af84d312d7f186fb85e5a1d04ab59dfd..ea342e8db4e4d97d5eccc849121cd0fd2e403017 100644
--- a/crates/agent/src/thread.rs
+++ b/crates/agent/src/thread.rs
@@ -22,13 +22,13 @@ use client::UserStore;
use cloud_api_types::Plan;
use collections::{HashMap, HashSet, IndexMap};
use fs::Fs;
-use futures::stream;
use futures::{
FutureExt,
channel::{mpsc, oneshot},
future::Shared,
stream::FuturesUnordered,
};
+use futures::{StreamExt, stream};
use gpui::{
App, AppContext, AsyncApp, Context, Entity, EventEmitter, SharedString, Task, WeakEntity,
};
@@ -47,7 +47,6 @@ use schemars::{JsonSchema, Schema};
use serde::de::DeserializeOwned;
use serde::{Deserialize, Serialize};
use settings::{LanguageModelSelection, Settings, ToolPermissionMode, update_settings_file};
-use smol::stream::StreamExt;
use std::{
collections::BTreeMap,
marker::PhantomData,
@@ -253,7 +252,7 @@ impl UserMessage {
)
.ok();
}
- MentionUri::PastedImage => {
+ MentionUri::PastedImage { .. } => {
debug_panic!("pasted image URI should not be used in mention content")
}
MentionUri::Directory { .. } => {
@@ -2095,7 +2094,7 @@ impl Thread {
this.update(cx, |this, _cx| {
this.pending_message()
.tool_results
- .insert(tool_result.tool_use_id.clone(), tool_result);
+ .insert(tool_result.tool_use_id.clone(), tool_result)
})?;
Ok(())
}
@@ -2195,15 +2194,15 @@ impl Thread {
raw_input,
json_parse_error,
} => {
- return Ok(Some(Task::ready(
- self.handle_tool_use_json_parse_error_event(
- id,
- tool_name,
- raw_input,
- json_parse_error,
- event_stream,
- ),
- )));
+ return Ok(self.handle_tool_use_json_parse_error_event(
+ id,
+ tool_name,
+ raw_input,
+ json_parse_error,
+ event_stream,
+ cancellation_rx,
+ cx,
+ ));
}
UsageUpdate(usage) => {
telemetry::event!(
@@ -2304,12 +2303,12 @@ impl Thread {
if !tool_use.is_input_complete {
if tool.supports_input_streaming() {
let running_turn = self.running_turn.as_mut()?;
- if let Some(sender) = running_turn.streaming_tool_inputs.get(&tool_use.id) {
+ if let Some(sender) = running_turn.streaming_tool_inputs.get_mut(&tool_use.id) {
sender.send_partial(tool_use.input);
return None;
}
- let (sender, tool_input) = ToolInputSender::channel();
+ let (mut sender, tool_input) = ToolInputSender::channel();
sender.send_partial(tool_use.input);
running_turn
.streaming_tool_inputs
@@ -2331,13 +2330,13 @@ impl Thread {
}
}
- if let Some(sender) = self
+ if let Some(mut sender) = self
.running_turn
.as_mut()?
.streaming_tool_inputs
.remove(&tool_use.id)
{
- sender.send_final(tool_use.input);
+ sender.send_full(tool_use.input);
return None;
}
@@ -2410,10 +2409,12 @@ impl Thread {
raw_input: Arc,
json_parse_error: String,
event_stream: &ThreadEventStream,
- ) -> LanguageModelToolResult {
+ cancellation_rx: watch::Receiver,
+ cx: &mut Context,
+ ) -> Option> {
let tool_use = LanguageModelToolUse {
- id: tool_use_id.clone(),
- name: tool_name.clone(),
+ id: tool_use_id,
+ name: tool_name,
raw_input: raw_input.to_string(),
input: serde_json::json!({}),
is_input_complete: true,
@@ -2426,14 +2427,43 @@ impl Thread {
event_stream,
);
- let tool_output = format!("Error parsing input JSON: {json_parse_error}");
- LanguageModelToolResult {
- tool_use_id,
- tool_name,
- is_error: true,
- content: LanguageModelToolResultContent::Text(tool_output.into()),
- output: Some(serde_json::Value::String(raw_input.to_string())),
+ let tool = self.tool(tool_use.name.as_ref());
+
+ let Some(tool) = tool else {
+ let content = format!("No tool named {} exists", tool_use.name);
+ return Some(Task::ready(LanguageModelToolResult {
+ content: LanguageModelToolResultContent::Text(Arc::from(content)),
+ tool_use_id: tool_use.id,
+ tool_name: tool_use.name,
+ is_error: true,
+ output: None,
+ }));
+ };
+
+ let error_message = format!("Error parsing input JSON: {json_parse_error}");
+
+ if tool.supports_input_streaming()
+ && let Some(mut sender) = self
+ .running_turn
+ .as_mut()?
+ .streaming_tool_inputs
+ .remove(&tool_use.id)
+ {
+ sender.send_invalid_json(error_message);
+ return None;
}
+
+ log::debug!("Running tool {}. Received invalid JSON", tool_use.name);
+ let tool_input = ToolInput::invalid_json(error_message);
+ Some(self.run_tool(
+ tool,
+ tool_input,
+ tool_use.id,
+ tool_use.name,
+ event_stream,
+ cancellation_rx,
+ cx,
+ ))
}
fn send_or_update_tool_use(
@@ -3114,8 +3144,7 @@ impl EventEmitter for Thread {}
/// For streaming tools, partial JSON snapshots arrive via `.recv_partial()` as the LLM streams
/// them, followed by the final complete input available through `.recv()`.
pub struct ToolInput {
- partial_rx: mpsc::UnboundedReceiver,
- final_rx: oneshot::Receiver,
+ rx: mpsc::UnboundedReceiver>,
_phantom: PhantomData,
}
@@ -3127,13 +3156,20 @@ impl ToolInput {
}
pub fn ready(value: serde_json::Value) -> Self {
- let (partial_tx, partial_rx) = mpsc::unbounded();
- drop(partial_tx);
- let (final_tx, final_rx) = oneshot::channel();
- final_tx.send(value).ok();
+ let (tx, rx) = mpsc::unbounded();
+ tx.unbounded_send(ToolInputPayload::Full(value)).ok();
Self {
- partial_rx,
- final_rx,
+ rx,
+ _phantom: PhantomData,
+ }
+ }
+
+ pub fn invalid_json(error_message: String) -> Self {
+ let (tx, rx) = mpsc::unbounded();
+ tx.unbounded_send(ToolInputPayload::InvalidJson { error_message })
+ .ok();
+ Self {
+ rx,
_phantom: PhantomData,
}
}
@@ -3147,65 +3183,89 @@ impl ToolInput {
/// Wait for the final deserialized input, ignoring all partial updates.
/// Non-streaming tools can use this to wait until the whole input is available.
pub async fn recv(mut self) -> Result {
- // Drain any remaining partials
- while self.partial_rx.next().await.is_some() {}
+ while let Ok(value) = self.next().await {
+ match value {
+ ToolInputPayload::Full(value) => return Ok(value),
+ ToolInputPayload::Partial(_) => {}
+ ToolInputPayload::InvalidJson { error_message } => {
+ return Err(anyhow!(error_message));
+ }
+ }
+ }
+ Err(anyhow!("tool input was not fully received"))
+ }
+
+ pub async fn next(&mut self) -> Result> {
let value = self
- .final_rx
+ .rx
+ .next()
.await
- .map_err(|_| anyhow!("tool input was not fully received"))?;
- serde_json::from_value(value).map_err(Into::into)
- }
+ .ok_or_else(|| anyhow!("tool input was not fully received"))?;
- /// Returns the next partial JSON snapshot, or `None` when input is complete.
- /// Once this returns `None`, call `recv()` to get the final input.
- pub async fn recv_partial(&mut self) -> Option {
- self.partial_rx.next().await
+ Ok(match value {
+ ToolInputPayload::Partial(payload) => ToolInputPayload::Partial(payload),
+ ToolInputPayload::Full(payload) => {
+ ToolInputPayload::Full(serde_json::from_value(payload)?)
+ }
+ ToolInputPayload::InvalidJson { error_message } => {
+ ToolInputPayload::InvalidJson { error_message }
+ }
+ })
}
fn cast(self) -> ToolInput {
ToolInput {
- partial_rx: self.partial_rx,
- final_rx: self.final_rx,
+ rx: self.rx,
_phantom: PhantomData,
}
}
}
+pub enum ToolInputPayload {
+ Partial(serde_json::Value),
+ Full(T),
+ InvalidJson { error_message: String },
+}
+
pub struct ToolInputSender {
- partial_tx: mpsc::UnboundedSender,
- final_tx: Option>,
+ has_received_final: bool,
+ tx: mpsc::UnboundedSender>,
}
impl ToolInputSender {
pub(crate) fn channel() -> (Self, ToolInput) {
- let (partial_tx, partial_rx) = mpsc::unbounded();
- let (final_tx, final_rx) = oneshot::channel();
+ let (tx, rx) = mpsc::unbounded();
let sender = Self {
- partial_tx,
- final_tx: Some(final_tx),
+ tx,
+ has_received_final: false,
};
let input = ToolInput {
- partial_rx,
- final_rx,
+ rx,
_phantom: PhantomData,
};
(sender, input)
}
pub(crate) fn has_received_final(&self) -> bool {
- self.final_tx.is_none()
+ self.has_received_final
}
- pub(crate) fn send_partial(&self, value: serde_json::Value) {
- self.partial_tx.unbounded_send(value).ok();
+ pub fn send_partial(&mut self, payload: serde_json::Value) {
+ self.tx
+ .unbounded_send(ToolInputPayload::Partial(payload))
+ .ok();
}
- pub(crate) fn send_final(mut self, value: serde_json::Value) {
- // Close the partial channel so recv_partial() returns None
- self.partial_tx.close_channel();
- if let Some(final_tx) = self.final_tx.take() {
- final_tx.send(value).ok();
- }
+ pub fn send_full(&mut self, payload: serde_json::Value) {
+ self.has_received_final = true;
+ self.tx.unbounded_send(ToolInputPayload::Full(payload)).ok();
+ }
+
+ pub fn send_invalid_json(&mut self, error_message: String) {
+ self.has_received_final = true;
+ self.tx
+ .unbounded_send(ToolInputPayload::InvalidJson { error_message })
+ .ok();
}
}
@@ -4251,68 +4311,78 @@ mod tests {
) {
let (thread, event_stream) = setup_thread_for_test(cx).await;
- cx.update(|cx| {
- thread.update(cx, |thread, _cx| {
- let tool_use_id = LanguageModelToolUseId::from("test_tool_id");
- let tool_name: Arc = Arc::from("test_tool");
- let raw_input: Arc = Arc::from("{invalid json");
- let json_parse_error = "expected value at line 1 column 1".to_string();
-
- // Call the function under test
- let result = thread.handle_tool_use_json_parse_error_event(
- tool_use_id.clone(),
- tool_name.clone(),
- raw_input.clone(),
- json_parse_error,
- &event_stream,
- );
-
- // Verify the result is an error
- assert!(result.is_error);
- assert_eq!(result.tool_use_id, tool_use_id);
- assert_eq!(result.tool_name, tool_name);
- assert!(matches!(
- result.content,
- LanguageModelToolResultContent::Text(_)
- ));
-
- // Verify the tool use was added to the message content
- {
- let last_message = thread.pending_message();
- assert_eq!(
- last_message.content.len(),
- 1,
- "Should have one tool_use in content"
- );
-
- match &last_message.content[0] {
- AgentMessageContent::ToolUse(tool_use) => {
- assert_eq!(tool_use.id, tool_use_id);
- assert_eq!(tool_use.name, tool_name);
- assert_eq!(tool_use.raw_input, raw_input.to_string());
- assert!(tool_use.is_input_complete);
- // Should fall back to empty object for invalid JSON
- assert_eq!(tool_use.input, json!({}));
- }
- _ => panic!("Expected ToolUse content"),
- }
- }
-
- // Insert the tool result (simulating what the caller does)
- thread
- .pending_message()
- .tool_results
- .insert(result.tool_use_id.clone(), result);
+ let tool_use_id = LanguageModelToolUseId::from("test_tool_id");
+ let tool_name: Arc = Arc::from("test_tool");
+ let raw_input: Arc = Arc::from("{invalid json");
+ let json_parse_error = "expected value at line 1 column 1".to_string();
+
+ let (_cancellation_tx, cancellation_rx) = watch::channel(false);
+
+ let result = cx
+ .update(|cx| {
+ thread.update(cx, |thread, cx| {
+ // Call the function under test
+ thread
+ .handle_tool_use_json_parse_error_event(
+ tool_use_id.clone(),
+ tool_name.clone(),
+ raw_input.clone(),
+ json_parse_error,
+ &event_stream,
+ cancellation_rx,
+ cx,
+ )
+ .unwrap()
+ })
+ })
+ .await;
+
+ // Verify the result is an error
+ assert!(result.is_error);
+ assert_eq!(result.tool_use_id, tool_use_id);
+ assert_eq!(result.tool_name, tool_name);
+ assert!(matches!(
+ result.content,
+ LanguageModelToolResultContent::Text(_)
+ ));
- // Verify the tool result was added
+ thread.update(cx, |thread, _cx| {
+ // Verify the tool use was added to the message content
+ {
let last_message = thread.pending_message();
assert_eq!(
- last_message.tool_results.len(),
+ last_message.content.len(),
1,
- "Should have one tool_result"
+ "Should have one tool_use in content"
);
- assert!(last_message.tool_results.contains_key(&tool_use_id));
- });
- });
+
+ match &last_message.content[0] {
+ AgentMessageContent::ToolUse(tool_use) => {
+ assert_eq!(tool_use.id, tool_use_id);
+ assert_eq!(tool_use.name, tool_name);
+ assert_eq!(tool_use.raw_input, raw_input.to_string());
+ assert!(tool_use.is_input_complete);
+ // Should fall back to empty object for invalid JSON
+ assert_eq!(tool_use.input, json!({}));
+ }
+ _ => panic!("Expected ToolUse content"),
+ }
+ }
+
+ // Insert the tool result (simulating what the caller does)
+ thread
+ .pending_message()
+ .tool_results
+ .insert(result.tool_use_id.clone(), result);
+
+ // Verify the tool result was added
+ let last_message = thread.pending_message();
+ assert_eq!(
+ last_message.tool_results.len(),
+ 1,
+ "Should have one tool_result"
+ );
+ assert!(last_message.tool_results.contains_key(&tool_use_id));
+ })
}
}
diff --git a/crates/agent/src/tool_permissions.rs b/crates/agent/src/tool_permissions.rs
index e74b6e4c5ce34383ad7ea702f1ba3a0cfd028455..58e779da59aef176464839ed6f2d6a5c16e4bc12 100644
--- a/crates/agent/src/tool_permissions.rs
+++ b/crates/agent/src/tool_permissions.rs
@@ -563,7 +563,7 @@ mod tests {
use crate::tools::{DeletePathTool, EditFileTool, FetchTool, TerminalTool};
use agent_settings::{AgentProfileId, CompiledRegex, InvalidRegexPattern, ToolRules};
use gpui::px;
- use settings::{DockPosition, NotifyWhenAgentWaiting};
+ use settings::{DockPosition, NotifyWhenAgentWaiting, PlaySoundWhenAgentDone};
use std::sync::Arc;
fn test_agent_settings(tool_permissions: ToolPermissions) -> AgentSettings {
@@ -584,7 +584,7 @@ mod tests {
default_profile: AgentProfileId::default(),
profiles: Default::default(),
notify_when_agent_waiting: NotifyWhenAgentWaiting::default(),
- play_sound_when_agent_done: false,
+ play_sound_when_agent_done: PlaySoundWhenAgentDone::default(),
single_file_review: false,
model_parameters: vec![],
enable_feedback: false,
@@ -595,6 +595,7 @@ mod tests {
message_editor_min_lines: 1,
tool_permissions,
show_turn_stats: false,
+ show_merge_conflict_indicator: true,
new_thread_location: Default::default(),
sidebar_side: Default::default(),
thinking_display: Default::default(),
diff --git a/crates/agent/src/tools/copy_path_tool.rs b/crates/agent/src/tools/copy_path_tool.rs
index 95688f27dcd8ca04aef72358ce52144f95138e17..06600f64874851c8d703513ea006d7f0327a0952 100644
--- a/crates/agent/src/tools/copy_path_tool.rs
+++ b/crates/agent/src/tools/copy_path_tool.rs
@@ -383,8 +383,8 @@ mod tests {
assert!(
!matches!(
- event_rx.try_next(),
- Ok(Some(Ok(crate::ThreadEvent::ToolCallAuthorization(_))))
+ event_rx.try_recv(),
+ Ok(Ok(crate::ThreadEvent::ToolCallAuthorization(_)))
),
"Expected a single authorization prompt",
);
@@ -450,8 +450,8 @@ mod tests {
assert!(result.is_err(), "Tool should fail when policy denies");
assert!(
!matches!(
- event_rx.try_next(),
- Ok(Some(Ok(crate::ThreadEvent::ToolCallAuthorization(_))))
+ event_rx.try_recv(),
+ Ok(Ok(crate::ThreadEvent::ToolCallAuthorization(_)))
),
"Deny policy should not emit symlink authorization prompt",
);
diff --git a/crates/agent/src/tools/create_directory_tool.rs b/crates/agent/src/tools/create_directory_tool.rs
index d6c59bcce30ab26991edba0fa7181ec45d10e1b0..60bb44e39ee5ab76168d909c08889cbbbc63f9f4 100644
--- a/crates/agent/src/tools/create_directory_tool.rs
+++ b/crates/agent/src/tools/create_directory_tool.rs
@@ -370,8 +370,8 @@ mod tests {
assert!(
!matches!(
- event_rx.try_next(),
- Ok(Some(Ok(crate::ThreadEvent::ToolCallAuthorization(_))))
+ event_rx.try_recv(),
+ Ok(Ok(crate::ThreadEvent::ToolCallAuthorization(_)))
),
"Expected a single authorization prompt",
);
@@ -440,8 +440,8 @@ mod tests {
assert!(result.is_err(), "Tool should fail when policy denies");
assert!(
!matches!(
- event_rx.try_next(),
- Ok(Some(Ok(crate::ThreadEvent::ToolCallAuthorization(_))))
+ event_rx.try_recv(),
+ Ok(Ok(crate::ThreadEvent::ToolCallAuthorization(_)))
),
"Deny policy should not emit symlink authorization prompt",
);
diff --git a/crates/agent/src/tools/delete_path_tool.rs b/crates/agent/src/tools/delete_path_tool.rs
index 7433975c7b782a145dd3e5a80ee59cd92945a989..21b4674425d9169e7740dd35c929302814006684 100644
--- a/crates/agent/src/tools/delete_path_tool.rs
+++ b/crates/agent/src/tools/delete_path_tool.rs
@@ -439,8 +439,8 @@ mod tests {
assert!(
!matches!(
- event_rx.try_next(),
- Ok(Some(Ok(crate::ThreadEvent::ToolCallAuthorization(_))))
+ event_rx.try_recv(),
+ Ok(Ok(crate::ThreadEvent::ToolCallAuthorization(_)))
),
"Expected a single authorization prompt",
);
@@ -513,8 +513,8 @@ mod tests {
assert!(result.is_err(), "Tool should fail when policy denies");
assert!(
!matches!(
- event_rx.try_next(),
- Ok(Some(Ok(crate::ThreadEvent::ToolCallAuthorization(_))))
+ event_rx.try_recv(),
+ Ok(Ok(crate::ThreadEvent::ToolCallAuthorization(_)))
),
"Deny policy should not emit symlink authorization prompt",
);
diff --git a/crates/agent/src/tools/edit_file_tool.rs b/crates/agent/src/tools/edit_file_tool.rs
index 763efd6724a719b90af93843f203ef8c1c3976bb..9bcf164096b99675febd3d7ae1bde8341f7c5ff8 100644
--- a/crates/agent/src/tools/edit_file_tool.rs
+++ b/crates/agent/src/tools/edit_file_tool.rs
@@ -1188,7 +1188,7 @@ mod tests {
})
.await
.unwrap();
- assert!(stream_rx.try_next().is_err());
+ assert!(stream_rx.try_recv().is_err());
// Test 4: Path with .zed in the middle should require confirmation
let (stream_tx, mut stream_rx) = ToolCallEventStream::test();
@@ -1251,7 +1251,7 @@ mod tests {
})
.await
.unwrap();
- assert!(stream_rx.try_next().is_err());
+ assert!(stream_rx.try_recv().is_err());
// 5.3: Normal in-project path with allow — no confirmation needed
let (stream_tx, mut stream_rx) = ToolCallEventStream::test();
@@ -1268,7 +1268,7 @@ mod tests {
})
.await
.unwrap();
- assert!(stream_rx.try_next().is_err());
+ assert!(stream_rx.try_recv().is_err());
// 5.4: With Confirm default, non-project paths still prompt
cx.update(|cx| {
@@ -1586,8 +1586,8 @@ mod tests {
assert!(result.is_err(), "Tool should fail when policy denies");
assert!(
!matches!(
- stream_rx.try_next(),
- Ok(Some(Ok(crate::ThreadEvent::ToolCallAuthorization(_))))
+ stream_rx.try_recv(),
+ Ok(Ok(crate::ThreadEvent::ToolCallAuthorization(_)))
),
"Deny policy should not emit symlink authorization prompt",
);
@@ -1658,7 +1658,7 @@ mod tests {
} else {
auth.await.unwrap();
assert!(
- stream_rx.try_next().is_err(),
+ stream_rx.try_recv().is_err(),
"Failed for case: {} - path: {} - expected no confirmation but got one",
description,
path
@@ -1769,7 +1769,7 @@ mod tests {
} else {
auth.await.unwrap();
assert!(
- stream_rx.try_next().is_err(),
+ stream_rx.try_recv().is_err(),
"Failed for case: {} - path: {} - expected no confirmation but got one",
description,
path
@@ -1862,7 +1862,7 @@ mod tests {
stream_rx.expect_authorization().await;
} else {
assert!(
- stream_rx.try_next().is_err(),
+ stream_rx.try_recv().is_err(),
"Failed for case: {} - path: {} - expected no confirmation but got one",
description,
path
@@ -1963,7 +1963,7 @@ mod tests {
})
.await
.unwrap();
- assert!(stream_rx.try_next().is_err());
+ assert!(stream_rx.try_recv().is_err());
}
}
diff --git a/crates/agent/src/tools/evals/streaming_edit_file.rs b/crates/agent/src/tools/evals/streaming_edit_file.rs
index 6a55517037e54ae4166cd22427201d9325ef0f76..0c6290ec098f9c37a0f6a077daf0a041c013d8ff 100644
--- a/crates/agent/src/tools/evals/streaming_edit_file.rs
+++ b/crates/agent/src/tools/evals/streaming_edit_file.rs
@@ -6,7 +6,7 @@ use crate::{
};
use Role::*;
use anyhow::{Context as _, Result};
-use client::{Client, UserStore};
+use client::{Client, RefreshLlmTokenListener, UserStore};
use fs::FakeFs;
use futures::{FutureExt, StreamExt, future::LocalBoxFuture};
use gpui::{AppContext as _, AsyncApp, Entity, TestAppContext, UpdateGlobal as _};
@@ -274,7 +274,8 @@ impl StreamingEditToolTest {
cx.set_http_client(http_client);
let client = Client::production(cx);
let user_store = cx.new(|cx| UserStore::new(client.clone(), cx));
- language_model::init(user_store.clone(), client.clone(), cx);
+ language_model::init(cx);
+ RefreshLlmTokenListener::register(client.clone(), user_store.clone(), cx);
language_models::init(user_store, client, cx);
});
diff --git a/crates/agent/src/tools/list_directory_tool.rs b/crates/agent/src/tools/list_directory_tool.rs
index 7abbe1ed4c488210b9079e59765dddc8d5208bed..c88492bba40ee4fdfa928f153e49a302ad60be8b 100644
--- a/crates/agent/src/tools/list_directory_tool.rs
+++ b/crates/agent/src/tools/list_directory_tool.rs
@@ -982,13 +982,11 @@ mod tests {
"Expected private path validation error, got: {error}"
);
- let event = event_rx.try_next();
+ let event = event_rx.try_recv();
assert!(
!matches!(
event,
- Ok(Some(Ok(crate::thread::ThreadEvent::ToolCallAuthorization(
- _
- ))))
+ Ok(Ok(crate::thread::ThreadEvent::ToolCallAuthorization(_)))
),
"No authorization should be requested when validation fails before listing",
);
@@ -1030,13 +1028,11 @@ mod tests {
"Normal path should succeed without authorization"
);
- let event = event_rx.try_next();
+ let event = event_rx.try_recv();
assert!(
!matches!(
event,
- Ok(Some(Ok(crate::thread::ThreadEvent::ToolCallAuthorization(
- _
- ))))
+ Ok(Ok(crate::thread::ThreadEvent::ToolCallAuthorization(_)))
),
"No authorization should be requested for normal paths",
);
@@ -1087,13 +1083,11 @@ mod tests {
"Intra-project symlink should succeed without authorization: {result:?}",
);
- let event = event_rx.try_next();
+ let event = event_rx.try_recv();
assert!(
!matches!(
event,
- Ok(Some(Ok(crate::thread::ThreadEvent::ToolCallAuthorization(
- _
- ))))
+ Ok(Ok(crate::thread::ThreadEvent::ToolCallAuthorization(_)))
),
"No authorization should be requested for intra-project symlinks",
);
diff --git a/crates/agent/src/tools/move_path_tool.rs b/crates/agent/src/tools/move_path_tool.rs
index 147947bb67ec646c38b51f37dd75779ed78ec85b..eaea204d84d96ab841f2e075a42a1a42b827374d 100644
--- a/crates/agent/src/tools/move_path_tool.rs
+++ b/crates/agent/src/tools/move_path_tool.rs
@@ -390,8 +390,8 @@ mod tests {
assert!(
!matches!(
- event_rx.try_next(),
- Ok(Some(Ok(crate::ThreadEvent::ToolCallAuthorization(_))))
+ event_rx.try_recv(),
+ Ok(Ok(crate::ThreadEvent::ToolCallAuthorization(_)))
),
"Expected a single authorization prompt",
);
@@ -457,8 +457,8 @@ mod tests {
assert!(result.is_err(), "Tool should fail when policy denies");
assert!(
!matches!(
- event_rx.try_next(),
- Ok(Some(Ok(crate::ThreadEvent::ToolCallAuthorization(_))))
+ event_rx.try_recv(),
+ Ok(Ok(crate::ThreadEvent::ToolCallAuthorization(_)))
),
"Deny policy should not emit symlink authorization prompt",
);
diff --git a/crates/agent/src/tools/read_file_tool.rs b/crates/agent/src/tools/read_file_tool.rs
index 093a8580892cfc4cec0a061bcc10717b28c608f2..0086a82f4e79c9924502202873ceb2b25d2e66fb 100644
--- a/crates/agent/src/tools/read_file_tool.rs
+++ b/crates/agent/src/tools/read_file_tool.rs
@@ -1317,13 +1317,11 @@ mod test {
"Expected private-files validation error, got: {error}"
);
- let event = event_rx.try_next();
+ let event = event_rx.try_recv();
assert!(
!matches!(
event,
- Ok(Some(Ok(crate::thread::ThreadEvent::ToolCallAuthorization(
- _
- ))))
+ Ok(Ok(crate::thread::ThreadEvent::ToolCallAuthorization(_)))
),
"No authorization should be requested when validation fails before read",
);
diff --git a/crates/agent/src/tools/restore_file_from_disk_tool.rs b/crates/agent/src/tools/restore_file_from_disk_tool.rs
index 9273ea5b8bb041e0ea53f3ea72b94b46e5a7e294..b808a966cf983c92a5e93c19599ff5333ed70860 100644
--- a/crates/agent/src/tools/restore_file_from_disk_tool.rs
+++ b/crates/agent/src/tools/restore_file_from_disk_tool.rs
@@ -589,8 +589,8 @@ mod tests {
assert!(result.is_err(), "Tool should fail when policy denies");
assert!(
!matches!(
- event_rx.try_next(),
- Ok(Some(Ok(crate::ThreadEvent::ToolCallAuthorization(_))))
+ event_rx.try_recv(),
+ Ok(Ok(crate::ThreadEvent::ToolCallAuthorization(_)))
),
"Deny policy should not emit symlink authorization prompt",
);
@@ -662,8 +662,8 @@ mod tests {
assert!(
!matches!(
- event_rx.try_next(),
- Ok(Some(Ok(crate::ThreadEvent::ToolCallAuthorization(_))))
+ event_rx.try_recv(),
+ Ok(Ok(crate::ThreadEvent::ToolCallAuthorization(_)))
),
"Expected a single authorization prompt",
);
diff --git a/crates/agent/src/tools/save_file_tool.rs b/crates/agent/src/tools/save_file_tool.rs
index c6a1cd79db65127164fe66f966029b58a366da7f..0cf9666a415f8174e9036ebadf8368589294c885 100644
--- a/crates/agent/src/tools/save_file_tool.rs
+++ b/crates/agent/src/tools/save_file_tool.rs
@@ -584,8 +584,8 @@ mod tests {
assert!(result.is_err(), "Tool should fail when policy denies");
assert!(
!matches!(
- event_rx.try_next(),
- Ok(Some(Ok(crate::ThreadEvent::ToolCallAuthorization(_))))
+ event_rx.try_recv(),
+ Ok(Ok(crate::ThreadEvent::ToolCallAuthorization(_)))
),
"Deny policy should not emit symlink authorization prompt",
);
@@ -657,8 +657,8 @@ mod tests {
assert!(
!matches!(
- event_rx.try_next(),
- Ok(Some(Ok(crate::ThreadEvent::ToolCallAuthorization(_))))
+ event_rx.try_recv(),
+ Ok(Ok(crate::ThreadEvent::ToolCallAuthorization(_)))
),
"Expected a single authorization prompt",
);
diff --git a/crates/agent/src/tools/streaming_edit_file_tool.rs b/crates/agent/src/tools/streaming_edit_file_tool.rs
index 88ec1e67787ad6efbeaa46b83b9034a24b10d3db..47da35bbf25ad188f3f6b98e843b2955910bb7ac 100644
--- a/crates/agent/src/tools/streaming_edit_file_tool.rs
+++ b/crates/agent/src/tools/streaming_edit_file_tool.rs
@@ -2,6 +2,7 @@ use super::edit_file_tool::EditFileTool;
use super::restore_file_from_disk_tool::RestoreFileFromDiskTool;
use super::save_file_tool::SaveFileTool;
use super::tool_edit_parser::{ToolEditEvent, ToolEditParser};
+use crate::ToolInputPayload;
use crate::{
AgentTool, Thread, ToolCallEventStream, ToolInput,
edit_agent::{
@@ -12,7 +13,7 @@ use crate::{
use acp_thread::Diff;
use action_log::ActionLog;
use agent_client_protocol::{self as acp, ToolCallLocation, ToolCallUpdateFields};
-use anyhow::{Context as _, Result};
+use anyhow::Result;
use collections::HashSet;
use futures::FutureExt as _;
use gpui::{App, AppContext, AsyncApp, Entity, Task, WeakEntity};
@@ -188,6 +189,10 @@ pub enum StreamingEditFileToolOutput {
},
Error {
error: String,
+ #[serde(default)]
+ input_path: Option,
+ #[serde(default)]
+ diff: String,
},
}
@@ -195,6 +200,8 @@ impl StreamingEditFileToolOutput {
pub fn error(error: impl Into) -> Self {
Self::Error {
error: error.into(),
+ input_path: None,
+ diff: String::new(),
}
}
}
@@ -215,7 +222,24 @@ impl std::fmt::Display for StreamingEditFileToolOutput {
)
}
}
- StreamingEditFileToolOutput::Error { error } => write!(f, "{error}"),
+ StreamingEditFileToolOutput::Error {
+ error,
+ diff,
+ input_path,
+ } => {
+ write!(f, "{error}\n")?;
+ if let Some(input_path) = input_path
+ && !diff.is_empty()
+ {
+ write!(
+ f,
+ "Edited {}:\n\n```diff\n{diff}\n```",
+ input_path.display()
+ )
+ } else {
+ write!(f, "No edits were made.")
+ }
+ }
}
}
}
@@ -233,6 +257,14 @@ pub struct StreamingEditFileTool {
language_registry: Arc,
}
+enum EditSessionResult {
+ Completed(EditSession),
+ Failed {
+ error: String,
+ session: Option,
+ },
+}
+
impl StreamingEditFileTool {
pub fn new(
project: Entity,
@@ -276,6 +308,158 @@ impl StreamingEditFileTool {
});
}
}
+
+ async fn ensure_buffer_saved(&self, buffer: &Entity, cx: &mut AsyncApp) {
+ let format_on_save_enabled = buffer.read_with(cx, |buffer, cx| {
+ let settings = language_settings::LanguageSettings::for_buffer(buffer, cx);
+ settings.format_on_save != FormatOnSave::Off
+ });
+
+ if format_on_save_enabled {
+ self.project
+ .update(cx, |project, cx| {
+ project.format(
+ HashSet::from_iter([buffer.clone()]),
+ LspFormatTarget::Buffers,
+ false,
+ FormatTrigger::Save,
+ cx,
+ )
+ })
+ .await
+ .log_err();
+ }
+
+ self.project
+ .update(cx, |project, cx| project.save_buffer(buffer.clone(), cx))
+ .await
+ .log_err();
+
+ self.action_log.update(cx, |log, cx| {
+ log.buffer_edited(buffer.clone(), cx);
+ });
+ }
+
+ async fn process_streaming_edits(
+ &self,
+ input: &mut ToolInput,
+ event_stream: &ToolCallEventStream,
+ cx: &mut AsyncApp,
+ ) -> EditSessionResult {
+ let mut session: Option = None;
+ let mut last_partial: Option = None;
+
+ loop {
+ futures::select! {
+ payload = input.next().fuse() => {
+ match payload {
+ Ok(payload) => match payload {
+ ToolInputPayload::Partial(partial) => {
+ if let Ok(parsed) = serde_json::from_value::(partial) {
+ let path_complete = parsed.path.is_some()
+ && parsed.path.as_ref() == last_partial.as_ref().and_then(|partial| partial.path.as_ref());
+
+ last_partial = Some(parsed.clone());
+
+ if session.is_none()
+ && path_complete
+ && let StreamingEditFileToolPartialInput {
+ path: Some(path),
+ display_description: Some(display_description),
+ mode: Some(mode),
+ ..
+ } = &parsed
+ {
+ match EditSession::new(
+ PathBuf::from(path),
+ display_description,
+ *mode,
+ self,
+ event_stream,
+ cx,
+ )
+ .await
+ {
+ Ok(created_session) => session = Some(created_session),
+ Err(error) => {
+ log::error!("Failed to create edit session: {}", error);
+ return EditSessionResult::Failed {
+ error,
+ session: None,
+ };
+ }
+ }
+ }
+
+ if let Some(current_session) = &mut session
+ && let Err(error) = current_session.process(parsed, self, event_stream, cx)
+ {
+ log::error!("Failed to process edit: {}", error);
+ return EditSessionResult::Failed { error, session };
+ }
+ }
+ }
+ ToolInputPayload::Full(full_input) => {
+ let mut session = if let Some(session) = session {
+ session
+ } else {
+ match EditSession::new(
+ full_input.path.clone(),
+ &full_input.display_description,
+ full_input.mode,
+ self,
+ event_stream,
+ cx,
+ )
+ .await
+ {
+ Ok(created_session) => created_session,
+ Err(error) => {
+ log::error!("Failed to create edit session: {}", error);
+ return EditSessionResult::Failed {
+ error,
+ session: None,
+ };
+ }
+ }
+ };
+
+ return match session.finalize(full_input, self, event_stream, cx).await {
+ Ok(()) => EditSessionResult::Completed(session),
+ Err(error) => {
+ log::error!("Failed to finalize edit: {}", error);
+ EditSessionResult::Failed {
+ error,
+ session: Some(session),
+ }
+ }
+ };
+ }
+ ToolInputPayload::InvalidJson { error_message } => {
+ log::error!("Received invalid JSON: {error_message}");
+ return EditSessionResult::Failed {
+ error: error_message,
+ session,
+ };
+ }
+ },
+ Err(error) => {
+ return EditSessionResult::Failed {
+ error: format!("Failed to receive tool input: {error}"),
+ session,
+ };
+ }
+ }
+ }
+ _ = event_stream.cancelled_by_user().fuse() => {
+ return EditSessionResult::Failed {
+ error: "Edit cancelled by user".to_string(),
+ session,
+ };
+ }
+ }
+ }
+ }
}
impl AgentTool for StreamingEditFileTool {
@@ -348,94 +532,40 @@ impl AgentTool for StreamingEditFileTool {
cx: &mut App,
) -> Task> {
cx.spawn(async move |cx: &mut AsyncApp| {
- let mut state: Option = None;
- let mut last_partial: Option = None;
- loop {
- futures::select! {
- partial = input.recv_partial().fuse() => {
- let Some(partial_value) = partial else { break };
- if let Ok(parsed) = serde_json::from_value::(partial_value) {
- let path_complete = parsed.path.is_some()
- && parsed.path.as_ref() == last_partial.as_ref().and_then(|p| p.path.as_ref());
-
- last_partial = Some(parsed.clone());
-
- if state.is_none()
- && path_complete
- && let StreamingEditFileToolPartialInput {
- path: Some(path),
- display_description: Some(display_description),
- mode: Some(mode),
- ..
- } = &parsed
- {
- match EditSession::new(
- &PathBuf::from(path),
- display_description,
- *mode,
- &self,
- &event_stream,
- cx,
- )
- .await
- {
- Ok(session) => state = Some(session),
- Err(e) => {
- log::error!("Failed to create edit session: {}", e);
- return Err(e);
- }
- }
- }
-
- if let Some(state) = &mut state {
- if let Err(e) = state.process(parsed, &self, &event_stream, cx) {
- log::error!("Failed to process edit: {}", e);
- return Err(e);
- }
- }
- }
- }
- _ = event_stream.cancelled_by_user().fuse() => {
- return Err(StreamingEditFileToolOutput::error("Edit cancelled by user"));
- }
- }
- }
- let full_input =
- input
- .recv()
- .await
- .map_err(|e| {
- let err = StreamingEditFileToolOutput::error(format!("Failed to receive tool input: {e}"));
- log::error!("Failed to receive tool input: {e}");
- err
- })?;
-
- let mut state = if let Some(state) = state {
- state
- } else {
- match EditSession::new(
- &full_input.path,
- &full_input.display_description,
- full_input.mode,
- &self,
- &event_stream,
- cx,
- )
+ match self
+ .process_streaming_edits(&mut input, &event_stream, cx)
.await
- {
- Ok(session) => session,
- Err(e) => {
- log::error!("Failed to create edit session: {}", e);
- return Err(e);
- }
+ {
+ EditSessionResult::Completed(session) => {
+ self.ensure_buffer_saved(&session.buffer, cx).await;
+ let (new_text, diff) = session.compute_new_text_and_diff(cx).await;
+ Ok(StreamingEditFileToolOutput::Success {
+ old_text: session.old_text.clone(),
+ new_text,
+ input_path: session.input_path,
+ diff,
+ })
}
- };
- match state.finalize(full_input, &self, &event_stream, cx).await {
- Ok(output) => Ok(output),
- Err(e) => {
- log::error!("Failed to finalize edit: {}", e);
- Err(e)
+ EditSessionResult::Failed {
+ error,
+ session: Some(session),
+ } => {
+ self.ensure_buffer_saved(&session.buffer, cx).await;
+ let (_new_text, diff) = session.compute_new_text_and_diff(cx).await;
+ Err(StreamingEditFileToolOutput::Error {
+ error,
+ input_path: Some(session.input_path),
+ diff,
+ })
}
+ EditSessionResult::Failed {
+ error,
+ session: None,
+ } => Err(StreamingEditFileToolOutput::Error {
+ error,
+ input_path: None,
+ diff: String::new(),
+ }),
}
})
}
@@ -472,6 +602,7 @@ impl AgentTool for StreamingEditFileTool {
pub struct EditSession {
abs_path: PathBuf,
+ input_path: PathBuf,
buffer: Entity,
old_text: Arc,
diff: Entity,
@@ -518,23 +649,21 @@ impl EditPipeline {
impl EditSession {
async fn new(
- path: &PathBuf,
+ path: PathBuf,
display_description: &str,
mode: StreamingEditFileMode,
tool: &StreamingEditFileTool,
event_stream: &ToolCallEventStream,
cx: &mut AsyncApp,
- ) -> Result {
- let project_path = cx
- .update(|cx| resolve_path(mode, &path, &tool.project, cx))
- .map_err(|e| StreamingEditFileToolOutput::error(e.to_string()))?;
+ ) -> Result {
+ let project_path = cx.update(|cx| resolve_path(mode, &path, &tool.project, cx))?;
let Some(abs_path) = cx.update(|cx| tool.project.read(cx).absolute_path(&project_path, cx))
else {
- return Err(StreamingEditFileToolOutput::error(format!(
+ return Err(format!(
"Worktree at '{}' does not exist",
path.to_string_lossy()
- )));
+ ));
};
event_stream.update_fields(
@@ -543,13 +672,13 @@ impl EditSession {
cx.update(|cx| tool.authorize(&path, &display_description, event_stream, cx))
.await
- .map_err(|e| StreamingEditFileToolOutput::error(e.to_string()))?;
+ .map_err(|e| e.to_string())?;
let buffer = tool
.project
.update(cx, |project, cx| project.open_buffer(project_path, cx))
.await
- .map_err(|e| StreamingEditFileToolOutput::error(e.to_string()))?;
+ .map_err(|e| e.to_string())?;
ensure_buffer_saved(&buffer, &abs_path, tool, cx)?;
@@ -578,6 +707,7 @@ impl EditSession {
Ok(Self {
abs_path,
+ input_path: path,
buffer,
old_text,
diff,
@@ -594,22 +724,20 @@ impl EditSession {
tool: &StreamingEditFileTool,
event_stream: &ToolCallEventStream,
cx: &mut AsyncApp,
- ) -> Result {
- let old_text = self.old_text.clone();
-
+ ) -> Result<(), String> {
match input.mode {
StreamingEditFileMode::Write => {
- let content = input.content.ok_or_else(|| {
- StreamingEditFileToolOutput::error("'content' field is required for write mode")
- })?;
+ let content = input
+ .content
+ .ok_or_else(|| "'content' field is required for write mode".to_string())?;
let events = self.parser.finalize_content(&content);
self.process_events(&events, tool, event_stream, cx)?;
}
StreamingEditFileMode::Edit => {
- let edits = input.edits.ok_or_else(|| {
- StreamingEditFileToolOutput::error("'edits' field is required for edit mode")
- })?;
+ let edits = input
+ .edits
+ .ok_or_else(|| "'edits' field is required for edit mode".to_string())?;
let events = self.parser.finalize_edits(&edits);
self.process_events(&events, tool, event_stream, cx)?;
@@ -625,53 +753,15 @@ impl EditSession {
}
}
}
+ Ok(())
+ }
- let format_on_save_enabled = self.buffer.read_with(cx, |buffer, cx| {
- let settings = language_settings::LanguageSettings::for_buffer(buffer, cx);
- settings.format_on_save != FormatOnSave::Off
- });
-
- if format_on_save_enabled {
- tool.action_log.update(cx, |log, cx| {
- log.buffer_edited(self.buffer.clone(), cx);
- });
-
- let format_task = tool.project.update(cx, |project, cx| {
- project.format(
- HashSet::from_iter([self.buffer.clone()]),
- LspFormatTarget::Buffers,
- false,
- FormatTrigger::Save,
- cx,
- )
- });
- futures::select! {
- result = format_task.fuse() => { result.log_err(); },
- _ = event_stream.cancelled_by_user().fuse() => {
- return Err(StreamingEditFileToolOutput::error("Edit cancelled by user"));
- }
- };
- }
-
- let save_task = tool.project.update(cx, |project, cx| {
- project.save_buffer(self.buffer.clone(), cx)
- });
- futures::select! {
- result = save_task.fuse() => { result.map_err(|e| StreamingEditFileToolOutput::error(e.to_string()))?; },
- _ = event_stream.cancelled_by_user().fuse() => {
- return Err(StreamingEditFileToolOutput::error("Edit cancelled by user"));
- }
- };
-
- tool.action_log.update(cx, |log, cx| {
- log.buffer_edited(self.buffer.clone(), cx);
- });
-
+ async fn compute_new_text_and_diff(&self, cx: &mut AsyncApp) -> (String, String) {
let new_snapshot = self.buffer.read_with(cx, |buffer, _cx| buffer.snapshot());
let (new_text, unified_diff) = cx
.background_spawn({
let new_snapshot = new_snapshot.clone();
- let old_text = old_text.clone();
+ let old_text = self.old_text.clone();
async move {
let new_text = new_snapshot.text();
let diff = language::unified_diff(&old_text, &new_text);
@@ -679,14 +769,7 @@ impl EditSession {
}
})
.await;
-
- let output = StreamingEditFileToolOutput::Success {
- input_path: input.path,
- new_text,
- old_text: old_text.clone(),
- diff: unified_diff,
- };
- Ok(output)
+ (new_text, unified_diff)
}
fn process(
@@ -695,7 +778,7 @@ impl EditSession {
tool: &StreamingEditFileTool,
event_stream: &ToolCallEventStream,
cx: &mut AsyncApp,
- ) -> Result<(), StreamingEditFileToolOutput> {
+ ) -> Result<(), String> {
match &self.mode {
StreamingEditFileMode::Write => {
if let Some(content) = &partial.content {
@@ -719,7 +802,7 @@ impl EditSession {
tool: &StreamingEditFileTool,
event_stream: &ToolCallEventStream,
cx: &mut AsyncApp,
- ) -> Result<(), StreamingEditFileToolOutput> {
+ ) -> Result<(), String> {
for event in events {
match event {
ToolEditEvent::ContentChunk { chunk } => {
@@ -760,7 +843,7 @@ impl EditSession {
{
if let Some(match_range) = matcher.push(chunk, None) {
let anchor_range = self.buffer.read_with(cx, |buffer, _cx| {
- buffer.anchor_range_between(match_range.clone())
+ buffer.anchor_range_outside(match_range.clone())
});
self.diff
.update(cx, |diff, cx| diff.reveal_range(anchor_range, cx));
@@ -795,7 +878,7 @@ impl EditSession {
let anchor_range = self
.buffer
- .read_with(cx, |buffer, _cx| buffer.anchor_range_between(range.clone()));
+ .read_with(cx, |buffer, _cx| buffer.anchor_range_outside(range.clone()));
self.diff
.update(cx, |diff, cx| diff.reveal_range(anchor_range, cx));
@@ -953,7 +1036,7 @@ fn apply_char_operations(
}
CharOperation::Delete { bytes } => {
let delete_end = *edit_cursor + bytes;
- let anchor_range = snapshot.anchor_range_around(*edit_cursor..delete_end);
+ let anchor_range = snapshot.anchor_range_inside(*edit_cursor..delete_end);
agent_edit_buffer(&buffer, [(anchor_range, "")], action_log, cx);
*edit_cursor = delete_end;
}
@@ -969,14 +1052,14 @@ fn extract_match(
buffer: &Entity,
edit_index: &usize,
cx: &mut AsyncApp,
-) -> Result, StreamingEditFileToolOutput> {
+) -> Result, String> {
match matches.len() {
- 0 => Err(StreamingEditFileToolOutput::error(format!(
+ 0 => Err(format!(
"Could not find matching text for edit at index {}. \
The old_text did not match any content in the file. \
Please read the file again to get the current content.",
edit_index,
- ))),
+ )),
1 => Ok(matches.into_iter().next().unwrap()),
_ => {
let snapshot = buffer.read_with(cx, |buffer, _cx| buffer.snapshot());
@@ -985,12 +1068,12 @@ fn extract_match(
.map(|r| (snapshot.offset_to_point(r.start).row + 1).to_string())
.collect::>()
.join(", ");
- Err(StreamingEditFileToolOutput::error(format!(
+ Err(format!(
"Edit {} matched multiple locations in the file at lines: {}. \
Please provide more context in old_text to uniquely \
identify the location.",
edit_index, lines
- )))
+ ))
}
}
}
@@ -1022,7 +1105,7 @@ fn ensure_buffer_saved(
abs_path: &PathBuf,
tool: &StreamingEditFileTool,
cx: &mut AsyncApp,
-) -> Result<(), StreamingEditFileToolOutput> {
+) -> Result<(), String> {
let last_read_mtime = tool
.action_log
.read_with(cx, |log, _| log.file_read_time(abs_path));
@@ -1063,15 +1146,14 @@ fn ensure_buffer_saved(
then ask them to save or revert the file manually and inform you when it's ok to proceed."
}
};
- return Err(StreamingEditFileToolOutput::error(message));
+ return Err(message.to_string());
}
if let (Some(last_read), Some(current)) = (last_read_mtime, current_mtime) {
if current != last_read {
- return Err(StreamingEditFileToolOutput::error(
- "The file has been modified since you last read it. \
- Please read the file again to get the current state before editing it.",
- ));
+ return Err("The file has been modified since you last read it. \
+ Please read the file again to get the current state before editing it."
+ .to_string());
}
}
@@ -1083,56 +1165,63 @@ fn resolve_path(
path: &PathBuf,
project: &Entity,
cx: &mut App,
-) -> Result {
+) -> Result {
let project = project.read(cx);
match mode {
StreamingEditFileMode::Edit => {
let path = project
.find_project_path(&path, cx)
- .context("Can't edit file: path not found")?;
+ .ok_or_else(|| "Can't edit file: path not found".to_string())?;
let entry = project
.entry_for_path(&path, cx)
- .context("Can't edit file: path not found")?;
+ .ok_or_else(|| "Can't edit file: path not found".to_string())?;
- anyhow::ensure!(entry.is_file(), "Can't edit file: path is a directory");
- Ok(path)
+ if entry.is_file() {
+ Ok(path)
+ } else {
+ Err("Can't edit file: path is a directory".to_string())
+ }
}
StreamingEditFileMode::Write => {
if let Some(path) = project.find_project_path(&path, cx)
&& let Some(entry) = project.entry_for_path(&path, cx)
{
- anyhow::ensure!(entry.is_file(), "Can't write to file: path is a directory");
- return Ok(path);
+ if entry.is_file() {
+ return Ok(path);
+ } else {
+ return Err("Can't write to file: path is a directory".to_string());
+ }
}
- let parent_path = path.parent().context("Can't create file: incorrect path")?;
+ let parent_path = path
+ .parent()
+ .ok_or_else(|| "Can't create file: incorrect path".to_string())?;
let parent_project_path = project.find_project_path(&parent_path, cx);
let parent_entry = parent_project_path
.as_ref()
.and_then(|path| project.entry_for_path(path, cx))
- .context("Can't create file: parent directory doesn't exist")?;
+ .ok_or_else(|| "Can't create file: parent directory doesn't exist")?;
- anyhow::ensure!(
- parent_entry.is_dir(),
- "Can't create file: parent is not a directory"
- );
+ if !parent_entry.is_dir() {
+ return Err("Can't create file: parent is not a directory".to_string());
+ }
let file_name = path
.file_name()
.and_then(|file_name| file_name.to_str())
.and_then(|file_name| RelPath::unix(file_name).ok())
- .context("Can't create file: invalid filename")?;
+ .ok_or_else(|| "Can't create file: invalid filename".to_string())?;
let new_file_path = parent_project_path.map(|parent| ProjectPath {
path: parent.path.join(file_name),
..parent
});
- new_file_path.context("Can't create file")
+ new_file_path.ok_or_else(|| "Can't create file".to_string())
}
}
}
@@ -1382,10 +1471,17 @@ mod tests {
})
.await;
- let StreamingEditFileToolOutput::Error { error } = result.unwrap_err() else {
+ let StreamingEditFileToolOutput::Error {
+ error,
+ diff,
+ input_path,
+ } = result.unwrap_err()
+ else {
panic!("expected error");
};
assert_eq!(error, "Can't edit file: path not found");
+ assert!(diff.is_empty());
+ assert_eq!(input_path, None);
}
#[gpui::test]
@@ -1411,7 +1507,7 @@ mod tests {
})
.await;
- let StreamingEditFileToolOutput::Error { error } = result.unwrap_err() else {
+ let StreamingEditFileToolOutput::Error { error, .. } = result.unwrap_err() else {
panic!("expected error");
};
assert!(
@@ -1424,7 +1520,7 @@ mod tests {
async fn test_streaming_early_buffer_open(cx: &mut TestAppContext) {
let (tool, _project, _action_log, _fs, _thread) =
setup_test(cx, json!({"file.txt": "line 1\nline 2\nline 3\n"})).await;
- let (sender, input) = ToolInput::::test();
+ let (mut sender, input) = ToolInput::::test();
let (event_stream, _receiver) = ToolCallEventStream::test();
let task = cx.update(|cx| tool.clone().run(input, event_stream, cx));
@@ -1447,7 +1543,7 @@ mod tests {
cx.run_until_parked();
// Now send the final complete input
- sender.send_final(json!({
+ sender.send_full(json!({
"display_description": "Edit lines",
"path": "root/file.txt",
"mode": "edit",
@@ -1465,7 +1561,7 @@ mod tests {
async fn test_streaming_path_completeness_heuristic(cx: &mut TestAppContext) {
let (tool, _project, _action_log, _fs, _thread) =
setup_test(cx, json!({"file.txt": "hello world"})).await;
- let (sender, input) = ToolInput::::test();
+ let (mut sender, input) = ToolInput::::test();
let (event_stream, _receiver) = ToolCallEventStream::test();
let task = cx.update(|cx| tool.clone().run(input, event_stream, cx));
@@ -1485,7 +1581,7 @@ mod tests {
cx.run_until_parked();
// Send final
- sender.send_final(json!({
+ sender.send_full(json!({
"display_description": "Overwrite file",
"path": "root/file.txt",
"mode": "write",
@@ -1503,7 +1599,7 @@ mod tests {
async fn test_streaming_cancellation_during_partials(cx: &mut TestAppContext) {
let (tool, _project, _action_log, _fs, _thread) =
setup_test(cx, json!({"file.txt": "hello world"})).await;
- let (sender, input) = ToolInput::::test();
+ let (mut sender, input) = ToolInput::::test();
let (event_stream, _receiver, mut cancellation_tx) =
ToolCallEventStream::test_with_cancellation();
let task = cx.update(|cx| tool.clone().run(input, event_stream, cx));
@@ -1521,7 +1617,7 @@ mod tests {
drop(sender);
let result = task.await;
- let StreamingEditFileToolOutput::Error { error } = result.unwrap_err() else {
+ let StreamingEditFileToolOutput::Error { error, .. } = result.unwrap_err() else {
panic!("expected error");
};
assert!(
@@ -1537,7 +1633,7 @@ mod tests {
json!({"file.txt": "line 1\nline 2\nline 3\nline 4\nline 5\n"}),
)
.await;
- let (sender, input) = ToolInput::::test();
+ let (mut sender, input) = ToolInput::::test();
let (event_stream, _receiver) = ToolCallEventStream::test();
let task = cx.update(|cx| tool.clone().run(input, event_stream, cx));
@@ -1578,7 +1674,7 @@ mod tests {
cx.run_until_parked();
// Send final complete input
- sender.send_final(json!({
+ sender.send_full(json!({
"display_description": "Edit multiple lines",
"path": "root/file.txt",
"mode": "edit",
@@ -1601,7 +1697,7 @@ mod tests {
#[gpui::test]
async fn test_streaming_create_file_with_partials(cx: &mut TestAppContext) {
let (tool, _project, _action_log, _fs, _thread) = setup_test(cx, json!({"dir": {}})).await;
- let (sender, input) = ToolInput::::test();
+ let (mut sender, input) = ToolInput::::test();
let (event_stream, _receiver) = ToolCallEventStream::test();
let task = cx.update(|cx| tool.clone().run(input, event_stream, cx));
@@ -1625,7 +1721,7 @@ mod tests {
cx.run_until_parked();
// Final with full content
- sender.send_final(json!({
+ sender.send_full(json!({
"display_description": "Create new file",
"path": "root/dir/new_file.txt",
"mode": "write",
@@ -1643,12 +1739,12 @@ mod tests {
async fn test_streaming_no_partials_direct_final(cx: &mut TestAppContext) {
let (tool, _project, _action_log, _fs, _thread) =
setup_test(cx, json!({"file.txt": "line 1\nline 2\nline 3\n"})).await;
- let (sender, input) = ToolInput::::test();
+ let (mut sender, input) = ToolInput::::test();
let (event_stream, _receiver) = ToolCallEventStream::test();
let task = cx.update(|cx| tool.clone().run(input, event_stream, cx));
// Send final immediately with no partials (simulates non-streaming path)
- sender.send_final(json!({
+ sender.send_full(json!({
"display_description": "Edit lines",
"path": "root/file.txt",
"mode": "edit",
@@ -1669,7 +1765,7 @@ mod tests {
json!({"file.txt": "line 1\nline 2\nline 3\nline 4\nline 5\n"}),
)
.await;
- let (sender, input) = ToolInput::::test();
+ let (mut sender, input) = ToolInput::::test();
let (event_stream, _receiver) = ToolCallEventStream::test();
let task = cx.update(|cx| tool.clone().run(input, event_stream, cx));
@@ -1739,7 +1835,7 @@ mod tests {
);
// Send final complete input
- sender.send_final(json!({
+ sender.send_full(json!({
"display_description": "Edit multiple lines",
"path": "root/file.txt",
"mode": "edit",
@@ -1767,7 +1863,7 @@ mod tests {
async fn test_streaming_incremental_three_edits(cx: &mut TestAppContext) {
let (tool, project, _action_log, _fs, _thread) =
setup_test(cx, json!({"file.txt": "aaa\nbbb\nccc\nddd\neee\n"})).await;
- let (sender, input) = ToolInput::::test();
+ let (mut sender, input) = ToolInput::::test();
let (event_stream, _receiver) = ToolCallEventStream::test();
let task = cx.update(|cx| tool.clone().run(input, event_stream, cx));
@@ -1835,7 +1931,7 @@ mod tests {
assert_eq!(buffer_text.as_deref(), Some("AAA\nbbb\nCCC\nddd\nEEEeee\n"));
// Send final
- sender.send_final(json!({
+ sender.send_full(json!({
"display_description": "Edit three lines",
"path": "root/file.txt",
"mode": "edit",
@@ -1857,7 +1953,7 @@ mod tests {
async fn test_streaming_edit_failure_mid_stream(cx: &mut TestAppContext) {
let (tool, project, _action_log, _fs, _thread) =
setup_test(cx, json!({"file.txt": "line 1\nline 2\nline 3\n"})).await;
- let (sender, input) = ToolInput::::test();
+ let (mut sender, input) = ToolInput::::test();
let (event_stream, _receiver) = ToolCallEventStream::test();
let task = cx.update(|cx| tool.clone().run(input, event_stream, cx));
@@ -1893,16 +1989,17 @@ mod tests {
}));
cx.run_until_parked();
- // Verify edit 1 was applied
- let buffer_text = project.update(cx, |project, cx| {
+ let buffer = project.update(cx, |project, cx| {
let pp = project
.find_project_path(&PathBuf::from("root/file.txt"), cx)
.unwrap();
- project.get_open_buffer(&pp, cx).map(|b| b.read(cx).text())
+ project.get_open_buffer(&pp, cx).unwrap()
});
+
+ // Verify edit 1 was applied
+ let buffer_text = buffer.read_with(cx, |buffer, _cx| buffer.text());
assert_eq!(
- buffer_text.as_deref(),
- Some("MODIFIED\nline 2\nline 3\n"),
+ buffer_text, "MODIFIED\nline 2\nline 3\n",
"First edit should be applied even though second edit will fail"
);
@@ -1925,20 +2022,32 @@ mod tests {
drop(sender);
let result = task.await;
- let StreamingEditFileToolOutput::Error { error } = result.unwrap_err() else {
+ let StreamingEditFileToolOutput::Error {
+ error,
+ diff,
+ input_path,
+ } = result.unwrap_err()
+ else {
panic!("expected error");
};
+
assert!(
error.contains("Could not find matching text for edit at index 1"),
"Expected error about edit 1 failing, got: {error}"
);
+ // Ensure that first edit was applied successfully and that we saved the buffer
+ assert_eq!(input_path, Some(PathBuf::from("root/file.txt")));
+ assert_eq!(
+ diff,
+ "@@ -1,3 +1,3 @@\n-line 1\n+MODIFIED\n line 2\n line 3\n"
+ );
}
#[gpui::test]
async fn test_streaming_single_edit_no_incremental(cx: &mut TestAppContext) {
let (tool, project, _action_log, _fs, _thread) =
setup_test(cx, json!({"file.txt": "hello world\n"})).await;
- let (sender, input) = ToolInput::::test();
+ let (mut sender, input) = ToolInput::::test();
let (event_stream, _receiver) = ToolCallEventStream::test();
let task = cx.update(|cx| tool.clone().run(input, event_stream, cx));
@@ -1975,7 +2084,7 @@ mod tests {
);
// Send final — the edit is applied during finalization
- sender.send_final(json!({
+ sender.send_full(json!({
"display_description": "Single edit",
"path": "root/file.txt",
"mode": "edit",
@@ -1993,7 +2102,7 @@ mod tests {
async fn test_streaming_input_partials_then_final(cx: &mut TestAppContext) {
let (tool, _project, _action_log, _fs, _thread) =
setup_test(cx, json!({"file.txt": "line 1\nline 2\nline 3\n"})).await;
- let (sender, input): (ToolInputSender, ToolInput) =
+ let (mut sender, input): (ToolInputSender, ToolInput) =
ToolInput::test();
let (event_stream, _event_rx) = ToolCallEventStream::test();
let task = cx.update(|cx| tool.clone().run(input, event_stream, cx));
@@ -2020,7 +2129,7 @@ mod tests {
cx.run_until_parked();
// Send the final complete input
- sender.send_final(json!({
+ sender.send_full(json!({
"display_description": "Edit lines",
"path": "root/file.txt",
"mode": "edit",
@@ -2038,7 +2147,7 @@ mod tests {
async fn test_streaming_input_sender_dropped_before_final(cx: &mut TestAppContext) {
let (tool, _project, _action_log, _fs, _thread) =
setup_test(cx, json!({"file.txt": "hello world\n"})).await;
- let (sender, input): (ToolInputSender, ToolInput) =
+ let (mut sender, input): (ToolInputSender, ToolInput) =
ToolInput::test();
let (event_stream, _event_rx) = ToolCallEventStream::test();
let task = cx.update(|cx| tool.clone().run(input, event_stream, cx));
@@ -2064,7 +2173,7 @@ mod tests {
// Create a channel and send multiple partials before a final, then use
// ToolInput::resolved-style immediate delivery to confirm recv() works
// when partials are already buffered.
- let (sender, input): (ToolInputSender, ToolInput) =
+ let (mut sender, input): (ToolInputSender, ToolInput) =
ToolInput::test();
let (event_stream, _event_rx) = ToolCallEventStream::test();
let task = cx.update(|cx| tool.clone().run(input, event_stream, cx));
@@ -2077,7 +2186,7 @@ mod tests {
"path": "root/dir/new.txt",
"mode": "write"
}));
- sender.send_final(json!({
+ sender.send_full(json!({
"display_description": "Create",
"path": "root/dir/new.txt",
"mode": "write",
@@ -2109,13 +2218,13 @@ mod tests {
let result = test_resolve_path(&mode, "root/dir/subdir", cx);
assert_eq!(
- result.await.unwrap_err().to_string(),
+ result.await.unwrap_err(),
"Can't write to file: path is a directory"
);
let result = test_resolve_path(&mode, "root/dir/nonexistent_dir/new.txt", cx);
assert_eq!(
- result.await.unwrap_err().to_string(),
+ result.await.unwrap_err(),
"Can't create file: parent directory doesn't exist"
);
}
@@ -2133,14 +2242,11 @@ mod tests {
assert_resolved_path_eq(result.await, rel_path(path_without_root));
let result = test_resolve_path(&mode, "root/nonexistent.txt", cx);
- assert_eq!(
- result.await.unwrap_err().to_string(),
- "Can't edit file: path not found"
- );
+ assert_eq!(result.await.unwrap_err(), "Can't edit file: path not found");
let result = test_resolve_path(&mode, "root/dir", cx);
assert_eq!(
- result.await.unwrap_err().to_string(),
+ result.await.unwrap_err(),
"Can't edit file: path is a directory"
);
}
@@ -2149,7 +2255,7 @@ mod tests {
mode: &StreamingEditFileMode,
path: &str,
cx: &mut TestAppContext,
- ) -> anyhow::Result {
+ ) -> Result {
init_test(cx);
let fs = project::FakeFs::new(cx.executor());
@@ -2170,7 +2276,7 @@ mod tests {
}
#[track_caller]
- fn assert_resolved_path_eq(path: anyhow::Result, expected: &RelPath) {
+ fn assert_resolved_path_eq(path: Result, expected: &RelPath) {
let actual = path.expect("Should return valid path").path;
assert_eq!(actual.as_ref(), expected);
}
@@ -2259,7 +2365,7 @@ mod tests {
});
// Use streaming pattern so executor can pump the LSP request/response
- let (sender, input) = ToolInput::::test();
+ let (mut sender, input) = ToolInput::::test();
let (event_stream, _receiver) = ToolCallEventStream::test();
let task = cx.update(|cx| tool.clone().run(input, event_stream, cx));
@@ -2271,7 +2377,7 @@ mod tests {
}));
cx.run_until_parked();
- sender.send_final(json!({
+ sender.send_full(json!({
"display_description": "Create main function",
"path": "root/src/main.rs",
"mode": "write",
@@ -2310,7 +2416,7 @@ mod tests {
});
});
- let (sender, input) = ToolInput::::test();
+ let (mut sender, input) = ToolInput::::test();
let (event_stream, _receiver) = ToolCallEventStream::test();
let tool2 = Arc::new(StreamingEditFileTool::new(
@@ -2329,7 +2435,7 @@ mod tests {
}));
cx.run_until_parked();
- sender.send_final(json!({
+ sender.send_full(json!({
"display_description": "Update main function",
"path": "root/src/main.rs",
"mode": "write",
@@ -2493,7 +2599,7 @@ mod tests {
})
.await
.unwrap();
- assert!(stream_rx.try_next().is_err());
+ assert!(stream_rx.try_recv().is_err());
// Test 4: Path with .zed in the middle should require confirmation
let (stream_tx, mut stream_rx) = ToolCallEventStream::test();
@@ -2540,7 +2646,7 @@ mod tests {
cx.update(|cx| tool.authorize(&PathBuf::from("/etc/hosts"), "test 5.2", &stream_tx, cx))
.await
.unwrap();
- assert!(stream_rx.try_next().is_err());
+ assert!(stream_rx.try_recv().is_err());
// 5.3: Normal in-project path with allow — no confirmation needed
let (stream_tx, mut stream_rx) = ToolCallEventStream::test();
@@ -2554,7 +2660,7 @@ mod tests {
})
.await
.unwrap();
- assert!(stream_rx.try_next().is_err());
+ assert!(stream_rx.try_recv().is_err());
// 5.4: With Confirm default, non-project paths still prompt
cx.update(|cx| {
@@ -2767,8 +2873,8 @@ mod tests {
assert!(result.is_err(), "Tool should fail when policy denies");
assert!(
!matches!(
- stream_rx.try_next(),
- Ok(Some(Ok(crate::ThreadEvent::ToolCallAuthorization(_))))
+ stream_rx.try_recv(),
+ Ok(Ok(crate::ThreadEvent::ToolCallAuthorization(_)))
),
"Deny policy should not emit symlink authorization prompt",
);
@@ -2810,7 +2916,7 @@ mod tests {
} else {
auth.await.unwrap();
assert!(
- stream_rx.try_next().is_err(),
+ stream_rx.try_recv().is_err(),
"Failed for case: {} - path: {} - expected no confirmation but got one",
description,
path
@@ -2887,7 +2993,7 @@ mod tests {
} else {
auth.await.unwrap();
assert!(
- stream_rx.try_next().is_err(),
+ stream_rx.try_recv().is_err(),
"Failed for case: {} - path: {} - expected no confirmation but got one",
description,
path
@@ -2947,7 +3053,7 @@ mod tests {
stream_rx.expect_authorization().await;
} else {
assert!(
- stream_rx.try_next().is_err(),
+ stream_rx.try_recv().is_err(),
"Failed for case: {} - path: {} - expected no confirmation but got one",
description,
path
@@ -3015,7 +3121,7 @@ mod tests {
})
.await
.unwrap();
- assert!(stream_rx.try_next().is_err());
+ assert!(stream_rx.try_recv().is_err());
}
}
@@ -3288,14 +3394,22 @@ mod tests {
})
.await;
- let StreamingEditFileToolOutput::Error { error } = result.unwrap_err() else {
+ let StreamingEditFileToolOutput::Error {
+ error,
+ diff,
+ input_path,
+ } = result.unwrap_err()
+ else {
panic!("expected error");
};
+
assert!(
error.contains("has been modified since you last read it"),
"Error should mention file modification, got: {}",
error
);
+ assert!(diff.is_empty());
+ assert!(input_path.is_none());
}
#[gpui::test]
@@ -3362,7 +3476,12 @@ mod tests {
})
.await;
- let StreamingEditFileToolOutput::Error { error } = result.unwrap_err() else {
+ let StreamingEditFileToolOutput::Error {
+ error,
+ diff,
+ input_path,
+ } = result.unwrap_err()
+ else {
panic!("expected error");
};
assert!(
@@ -3380,6 +3499,8 @@ mod tests {
"Error should ask user to manually save or revert when tools aren't available, got: {}",
error
);
+ assert!(diff.is_empty());
+ assert!(input_path.is_none());
}
#[gpui::test]
@@ -3390,7 +3511,7 @@ mod tests {
// the modified buffer and succeeds.
let (tool, _project, _action_log, _fs, _thread) =
setup_test(cx, json!({"file.txt": "aaa\nbbb\nccc\nddd\neee\n"})).await;
- let (sender, input) = ToolInput::::test();
+ let (mut sender, input) = ToolInput::::test();
let (event_stream, _receiver) = ToolCallEventStream::test();
let task = cx.update(|cx| tool.clone().run(input, event_stream, cx));
@@ -3420,7 +3541,7 @@ mod tests {
cx.run_until_parked();
// Send the final input with all three edits.
- sender.send_final(json!({
+ sender.send_full(json!({
"display_description": "Overlapping edits",
"path": "root/file.txt",
"mode": "edit",
@@ -3441,7 +3562,7 @@ mod tests {
#[gpui::test]
async fn test_streaming_create_content_streamed(cx: &mut TestAppContext) {
let (tool, project, _action_log, _fs, _thread) = setup_test(cx, json!({"dir": {}})).await;
- let (sender, input) = ToolInput::::test();
+ let (mut sender, input) = ToolInput::::test();
let (event_stream, _receiver) = ToolCallEventStream::test();
let task = cx.update(|cx| tool.clone().run(input, event_stream, cx));
@@ -3495,7 +3616,7 @@ mod tests {
);
// Send final input
- sender.send_final(json!({
+ sender.send_full(json!({
"display_description": "Create new file",
"path": "root/dir/new_file.txt",
"mode": "write",
@@ -3516,7 +3637,7 @@ mod tests {
json!({"file.txt": "old line 1\nold line 2\nold line 3\n"}),
)
.await;
- let (sender, input) = ToolInput::::test();
+ let (mut sender, input) = ToolInput::::test();
let (event_stream, mut receiver) = ToolCallEventStream::test();
let task = cx.update(|cx| tool.clone().run(input, event_stream, cx));
@@ -3559,7 +3680,7 @@ mod tests {
});
// Send final input
- sender.send_final(json!({
+ sender.send_full(json!({
"display_description": "Overwrite file",
"path": "root/file.txt",
"mode": "write",
@@ -3587,7 +3708,7 @@ mod tests {
json!({"file.txt": "old line 1\nold line 2\nold line 3\n"}),
)
.await;
- let (sender, input) = ToolInput::::test();
+ let (mut sender, input) = ToolInput::::test();
let (event_stream, _receiver) = ToolCallEventStream::test();
let task = cx.update(|cx| tool.clone().run(input, event_stream, cx));
@@ -3634,7 +3755,7 @@ mod tests {
);
// Send final input with complete content
- sender.send_final(json!({
+ sender.send_full(json!({
"display_description": "Overwrite file",
"path": "root/file.txt",
"mode": "write",
@@ -3656,7 +3777,7 @@ mod tests {
async fn test_streaming_edit_json_fixer_escape_corruption(cx: &mut TestAppContext) {
let (tool, _project, _action_log, _fs, _thread) =
setup_test(cx, json!({"file.txt": "hello\nworld\nfoo\n"})).await;
- let (sender, input) = ToolInput::::test();
+ let (mut sender, input) = ToolInput::::test();
let (event_stream, _receiver) = ToolCallEventStream::test();
let task = cx.update(|cx| tool.clone().run(input, event_stream, cx));
@@ -3690,7 +3811,7 @@ mod tests {
cx.run_until_parked();
// Send final.
- sender.send_final(json!({
+ sender.send_full(json!({
"display_description": "Edit",
"path": "root/file.txt",
"mode": "edit",
@@ -3708,7 +3829,7 @@ mod tests {
async fn test_streaming_final_input_stringified_edits_succeeds(cx: &mut TestAppContext) {
let (tool, _project, _action_log, _fs, _thread) =
setup_test(cx, json!({"file.txt": "hello\nworld\n"})).await;
- let (sender, input) = ToolInput::::test();
+ let (mut sender, input) = ToolInput::::test();
let (event_stream, _receiver) = ToolCallEventStream::test();
let task = cx.update(|cx| tool.clone().run(input, event_stream, cx));
@@ -3719,7 +3840,7 @@ mod tests {
}));
cx.run_until_parked();
- sender.send_final(json!({
+ sender.send_full(json!({
"display_description": "Edit",
"path": "root/file.txt",
"mode": "edit",
@@ -3823,7 +3944,7 @@ mod tests {
) {
let (tool, _project, _action_log, _fs, _thread) =
setup_test(cx, json!({"file.txt": "old_content"})).await;
- let (sender, input) = ToolInput::::test();
+ let (mut sender, input) = ToolInput::::test();
let (event_stream, _receiver) = ToolCallEventStream::test();
let task = cx.update(|cx| tool.clone().run(input, event_stream, cx));
@@ -3849,7 +3970,7 @@ mod tests {
cx.run_until_parked();
// Send final.
- sender.send_final(json!({
+ sender.send_full(json!({
"display_description": "Overwrite file",
"mode": "write",
"content": "new_content",
@@ -3869,7 +3990,7 @@ mod tests {
) {
let (tool, _project, _action_log, _fs, _thread) =
setup_test(cx, json!({"file.txt": "old_content"})).await;
- let (sender, input) = ToolInput::::test();
+ let (mut sender, input) = ToolInput::::test();
let (event_stream, _receiver) = ToolCallEventStream::test();
let task = cx.update(|cx| tool.clone().run(input, event_stream, cx));
@@ -3902,7 +4023,7 @@ mod tests {
cx.run_until_parked();
// Send final.
- sender.send_final(json!({
+ sender.send_full(json!({
"display_description": "Overwrite file",
"mode": "edit",
"edits": [{"old_text": "old_content", "new_text": "new_content"}],
@@ -3939,11 +4060,11 @@ mod tests {
let old_text = "}\n\n\n\nfn render_search";
let new_text = "}\n\nfn render_search";
- let (sender, input) = ToolInput::::test();
+ let (mut sender, input) = ToolInput::::test();
let (event_stream, _receiver) = ToolCallEventStream::test();
let task = cx.update(|cx| tool.clone().run(input, event_stream, cx));
- sender.send_final(json!({
+ sender.send_full(json!({
"display_description": "Remove extra blank lines",
"path": "root/file.rs",
"mode": "edit",
@@ -3980,11 +4101,11 @@ mod tests {
let (tool, _project, _action_log, _fs, _thread) =
setup_test(cx, json!({"file.rs": file_content})).await;
- let (sender, input) = ToolInput::::test();
+ let (mut sender, input) = ToolInput::::test();
let (event_stream, _receiver) = ToolCallEventStream::test();
let task = cx.update(|cx| tool.clone().run(input, event_stream, cx));
- sender.send_final(json!({
+ sender.send_full(json!({
"display_description": "description",
"path": "root/file.rs",
"mode": "edit",
diff --git a/crates/agent/src/tools/terminal_tool.rs b/crates/agent/src/tools/terminal_tool.rs
index 82bf9a06480bb7d6db3611516281f42452ec5137..f36bd0fe3d3fb00931a7dc272d76eb042f6570f6 100644
--- a/crates/agent/src/tools/terminal_tool.rs
+++ b/crates/agent/src/tools/terminal_tool.rs
@@ -681,17 +681,17 @@ mod tests {
);
assert!(
!matches!(
- rx.try_next(),
- Ok(Some(Ok(crate::ThreadEvent::ToolCallAuthorization(_))))
+ rx.try_recv(),
+ Ok(Ok(crate::ThreadEvent::ToolCallAuthorization(_)))
),
"invalid command should not request authorization"
);
assert!(
!matches!(
- rx.try_next(),
- Ok(Some(Ok(crate::ThreadEvent::ToolCallUpdate(
+ rx.try_recv(),
+ Ok(Ok(crate::ThreadEvent::ToolCallUpdate(
acp_thread::ToolCallUpdate::UpdateFields(_)
- ))))
+ )))
),
"invalid command should not emit a terminal card update"
);
@@ -810,8 +810,8 @@ mod tests {
);
assert!(
!matches!(
- rx.try_next(),
- Ok(Some(Ok(crate::ThreadEvent::ToolCallAuthorization(_))))
+ rx.try_recv(),
+ Ok(Ok(crate::ThreadEvent::ToolCallAuthorization(_)))
),
"hardcoded denial should not request authorization"
);
@@ -1058,8 +1058,8 @@ mod tests {
);
assert!(
!matches!(
- rx.try_next(),
- Ok(Some(Ok(crate::ThreadEvent::ToolCallAuthorization(_))))
+ rx.try_recv(),
+ Ok(Ok(crate::ThreadEvent::ToolCallAuthorization(_)))
),
"rejected command {command:?} should not request authorization"
);
diff --git a/crates/agent_servers/Cargo.toml b/crates/agent_servers/Cargo.toml
index 1542466be35bbce80983a73a3fc2e0998799160c..7151f0084b1cb7d9b206f57551ce715ef67483f7 100644
--- a/crates/agent_servers/Cargo.toml
+++ b/crates/agent_servers/Cargo.toml
@@ -32,7 +32,6 @@ futures.workspace = true
gpui.workspace = true
feature_flags.workspace = true
gpui_tokio = { workspace = true, optional = true }
-credentials_provider.workspace = true
google_ai.workspace = true
http_client.workspace = true
indoc.workspace = true
@@ -53,6 +52,7 @@ terminal.workspace = true
uuid.workspace = true
util.workspace = true
watch.workspace = true
+zed_credentials_provider.workspace = true
[target.'cfg(unix)'.dependencies]
libc.workspace = true
diff --git a/crates/agent_servers/src/custom.rs b/crates/agent_servers/src/custom.rs
index 0dcd2240d6ecf6dc052cdd55953cff8ec1442eae..fb8d0a515244576d2cf02e4989cbd71beca448c7 100644
--- a/crates/agent_servers/src/custom.rs
+++ b/crates/agent_servers/src/custom.rs
@@ -3,7 +3,6 @@ use acp_thread::AgentConnection;
use agent_client_protocol as acp;
use anyhow::{Context as _, Result};
use collections::HashSet;
-use credentials_provider::CredentialsProvider;
use fs::Fs;
use gpui::{App, AppContext as _, Entity, Task};
use language_model::{ApiKey, EnvVar};
@@ -392,7 +391,7 @@ fn api_key_for_gemini_cli(cx: &mut App) -> Task> {
if let Some(key) = env_var.value {
return Task::ready(Ok(key));
}
- let credentials_provider = ::global(cx);
+ let credentials_provider = zed_credentials_provider::global(cx);
let api_url = google_ai::API_URL.to_string();
cx.spawn(async move |cx| {
Ok(
diff --git a/crates/agent_servers/src/e2e_tests.rs b/crates/agent_servers/src/e2e_tests.rs
index 956d106df2a260bd2eb31c14f4f1f1705bf74cd6..aa29a0c230c13949b15f2b39a245ae41ead4884d 100644
--- a/crates/agent_servers/src/e2e_tests.rs
+++ b/crates/agent_servers/src/e2e_tests.rs
@@ -1,6 +1,7 @@
use crate::{AgentServer, AgentServerDelegate};
use acp_thread::{AcpThread, AgentThreadEntry, ToolCall, ToolCallStatus};
use agent_client_protocol as acp;
+use client::RefreshLlmTokenListener;
use futures::{FutureExt, StreamExt, channel::mpsc, select};
use gpui::AppContext;
use gpui::{Entity, TestAppContext};
@@ -413,7 +414,8 @@ pub async fn init_test(cx: &mut TestAppContext) -> Arc {
cx.set_http_client(Arc::new(http_client));
let client = client::Client::production(cx);
let user_store = cx.new(|cx| client::UserStore::new(client.clone(), cx));
- language_model::init(user_store, client, cx);
+ language_model::init(cx);
+ RefreshLlmTokenListener::register(client.clone(), user_store, cx);
#[cfg(test)]
project::agent_server_store::AllAgentServersSettings::override_global(
diff --git a/crates/agent_settings/src/agent_settings.rs b/crates/agent_settings/src/agent_settings.rs
index 2ef65fe33641cdeca1a77642251523275511e81f..0c68d2f25d54f966d1cc0a93476457bbba79c959 100644
--- a/crates/agent_settings/src/agent_settings.rs
+++ b/crates/agent_settings/src/agent_settings.rs
@@ -13,8 +13,8 @@ use schemars::JsonSchema;
use serde::{Deserialize, Serialize};
use settings::{
DockPosition, DockSide, LanguageModelParameters, LanguageModelSelection, NewThreadLocation,
- NotifyWhenAgentWaiting, RegisterSetting, Settings, SettingsContent, SettingsStore,
- SidebarDockPosition, SidebarSide, ThinkingBlockDisplay, ToolPermissionMode,
+ NotifyWhenAgentWaiting, PlaySoundWhenAgentDone, RegisterSetting, Settings, SettingsContent,
+ SettingsStore, SidebarDockPosition, SidebarSide, ThinkingBlockDisplay, ToolPermissionMode,
update_settings_file,
};
@@ -165,7 +165,7 @@ pub struct AgentSettings {
pub profiles: IndexMap,
pub notify_when_agent_waiting: NotifyWhenAgentWaiting,
- pub play_sound_when_agent_done: bool,
+ pub play_sound_when_agent_done: PlaySoundWhenAgentDone,
pub single_file_review: bool,
pub model_parameters: Vec,
pub enable_feedback: bool,
@@ -176,6 +176,7 @@ pub struct AgentSettings {
pub use_modifier_to_send: bool,
pub message_editor_min_lines: usize,
pub show_turn_stats: bool,
+ pub show_merge_conflict_indicator: bool,
pub tool_permissions: ToolPermissions,
pub new_thread_location: NewThreadLocation,
}
@@ -618,7 +619,7 @@ impl Settings for AgentSettings {
.collect(),
notify_when_agent_waiting: agent.notify_when_agent_waiting.unwrap(),
- play_sound_when_agent_done: agent.play_sound_when_agent_done.unwrap(),
+ play_sound_when_agent_done: agent.play_sound_when_agent_done.unwrap_or_default(),
single_file_review: agent.single_file_review.unwrap(),
model_parameters: agent.model_parameters,
enable_feedback: agent.enable_feedback.unwrap(),
@@ -629,6 +630,7 @@ impl Settings for AgentSettings {
use_modifier_to_send: agent.use_modifier_to_send.unwrap(),
message_editor_min_lines: agent.message_editor_min_lines.unwrap(),
show_turn_stats: agent.show_turn_stats.unwrap(),
+ show_merge_conflict_indicator: agent.show_merge_conflict_indicator.unwrap(),
tool_permissions: compile_tool_permissions(agent.tool_permissions),
new_thread_location: agent.new_thread_location.unwrap_or_default(),
}
diff --git a/crates/agent_ui/src/agent_configuration/add_llm_provider_modal.rs b/crates/agent_ui/src/agent_configuration/add_llm_provider_modal.rs
index 4e3dd63b0337f9be54b550f4f4a6a5ca2e7cdd42..e0df79ba4dfe226652818b120b7bfcc493c73b1e 100644
--- a/crates/agent_ui/src/agent_configuration/add_llm_provider_modal.rs
+++ b/crates/agent_ui/src/agent_configuration/add_llm_provider_modal.rs
@@ -202,6 +202,7 @@ impl ModelInput {
.text(cx)
.parse::()
.map_err(|_| SharedString::from("Max Tokens must be a number"))?,
+ reasoning_effort: None,
capabilities: ModelCapabilities {
tools: self.capabilities.supports_tools.selected(),
images: self.capabilities.supports_images.selected(),
@@ -815,7 +816,7 @@ mod tests {
cx.set_global(store);
theme_settings::init(theme::LoadThemes::JustBase, cx);
- language_model::init_settings(cx);
+ language_model::init(cx);
editor::init(cx);
});
diff --git a/crates/agent_ui/src/agent_diff.rs b/crates/agent_ui/src/agent_diff.rs
index 2e709c0be3297e270119c048c7b8e25e7958ee69..7b70740dd1ac462614a9d08d9e48d7d13ac2ed32 100644
--- a/crates/agent_ui/src/agent_diff.rs
+++ b/crates/agent_ui/src/agent_diff.rs
@@ -138,11 +138,12 @@ impl AgentDiffPane {
path_a.cmp(&path_b)
});
- let mut paths_to_delete = self
+ let mut buffers_to_delete = self
.multibuffer
.read(cx)
- .paths()
- .cloned()
+ .snapshot(cx)
+ .excerpts()
+ .map(|excerpt| excerpt.context.start.buffer_id)
.collect::>();
for (buffer, diff_handle) in sorted_buffers {
@@ -151,7 +152,7 @@ impl AgentDiffPane {
}
let path_key = PathKey::for_buffer(&buffer, cx);
- paths_to_delete.remove(&path_key);
+ buffers_to_delete.remove(&buffer.read(cx).remote_id());
let snapshot = buffer.read(cx).snapshot();
@@ -168,7 +169,7 @@ impl AgentDiffPane {
let (was_empty, is_excerpt_newly_added) =
self.multibuffer.update(cx, |multibuffer, cx| {
let was_empty = multibuffer.is_empty();
- let (_, is_excerpt_newly_added) = multibuffer.set_excerpts_for_path(
+ let is_excerpt_newly_added = multibuffer.update_excerpts_for_path(
path_key.clone(),
buffer.clone(),
diff_hunk_ranges,
@@ -183,13 +184,13 @@ impl AgentDiffPane {
if was_empty {
let first_hunk = editor
.diff_hunks_in_ranges(
- &[editor::Anchor::min()..editor::Anchor::max()],
+ &[editor::Anchor::Min..editor::Anchor::Max],
&self.multibuffer.read(cx).read(cx),
)
.next();
if let Some(first_hunk) = first_hunk {
- let first_hunk_start = first_hunk.multi_buffer_range().start;
+ let first_hunk_start = first_hunk.multi_buffer_range.start;
editor.change_selections(Default::default(), window, cx, |selections| {
selections.select_anchor_ranges([first_hunk_start..first_hunk_start]);
})
@@ -208,8 +209,8 @@ impl AgentDiffPane {
}
self.multibuffer.update(cx, |multibuffer, cx| {
- for path in paths_to_delete {
- multibuffer.remove_excerpts_for_path(path, cx);
+ for buffer_id in buffers_to_delete {
+ multibuffer.remove_excerpts_for_buffer(buffer_id, cx);
}
});
@@ -239,13 +240,13 @@ impl AgentDiffPane {
self.editor.update(cx, |editor, cx| {
let first_hunk = editor
.diff_hunks_in_ranges(
- &[position..editor::Anchor::max()],
+ &[position..editor::Anchor::Max],
&self.multibuffer.read(cx).read(cx),
)
.next();
if let Some(first_hunk) = first_hunk {
- let first_hunk_start = first_hunk.multi_buffer_range().start;
+ let first_hunk_start = first_hunk.multi_buffer_range.start;
editor.change_selections(Default::default(), window, cx, |selections| {
selections.select_anchor_ranges([first_hunk_start..first_hunk_start]);
})
@@ -282,7 +283,7 @@ impl AgentDiffPane {
editor,
&snapshot,
&self.thread,
- vec![editor::Anchor::min()..editor::Anchor::max()],
+ vec![editor::Anchor::Min..editor::Anchor::Max],
self.workspace.clone(),
window,
cx,
@@ -451,20 +452,20 @@ fn update_editor_selection(
diff_hunks
.last()
.and_then(|last_kept_hunk| {
- let last_kept_hunk_end = last_kept_hunk.multi_buffer_range().end;
+ let last_kept_hunk_end = last_kept_hunk.multi_buffer_range.end;
editor
.diff_hunks_in_ranges(
- &[last_kept_hunk_end..editor::Anchor::max()],
+ &[last_kept_hunk_end..editor::Anchor::Max],
buffer_snapshot,
)
.nth(1)
})
.or_else(|| {
let first_kept_hunk = diff_hunks.first()?;
- let first_kept_hunk_start = first_kept_hunk.multi_buffer_range().start;
+ let first_kept_hunk_start = first_kept_hunk.multi_buffer_range.start;
editor
.diff_hunks_in_ranges(
- &[editor::Anchor::min()..first_kept_hunk_start],
+ &[editor::Anchor::Min..first_kept_hunk_start],
buffer_snapshot,
)
.next()
@@ -473,7 +474,7 @@ fn update_editor_selection(
if let Some(target_hunk) = target_hunk {
editor.change_selections(Default::default(), window, cx, |selections| {
- let next_hunk_start = target_hunk.multi_buffer_range().start;
+ let next_hunk_start = target_hunk.multi_buffer_range.start;
selections.select_anchor_ranges([next_hunk_start..next_hunk_start]);
})
}
@@ -1567,7 +1568,7 @@ impl AgentDiff {
editor.update(cx, |editor, cx| {
let snapshot = multibuffer.read(cx).snapshot(cx);
if let Some(first_hunk) = snapshot.diff_hunks().next() {
- let first_hunk_start = first_hunk.multi_buffer_range().start;
+ let first_hunk_start = first_hunk.multi_buffer_range.start;
editor.change_selections(
SelectionEffects::scroll(Autoscroll::center()),
@@ -1648,7 +1649,7 @@ impl AgentDiff {
editor,
&snapshot,
thread,
- vec![editor::Anchor::min()..editor::Anchor::max()],
+ vec![editor::Anchor::Min..editor::Anchor::Max],
window,
cx,
);
@@ -1669,7 +1670,7 @@ impl AgentDiff {
editor,
&snapshot,
thread,
- vec![editor::Anchor::min()..editor::Anchor::max()],
+ vec![editor::Anchor::Min..editor::Anchor::Max],
workspace.clone(),
window,
cx,
@@ -1808,7 +1809,7 @@ mod tests {
cx.set_global(settings_store);
prompt_store::init(cx);
theme_settings::init(theme::LoadThemes::JustBase, cx);
- language_model::init_settings(cx);
+ language_model::init(cx);
});
let fs = FakeFs::new(cx.executor());
@@ -1965,7 +1966,7 @@ mod tests {
cx.set_global(settings_store);
prompt_store::init(cx);
theme_settings::init(theme::LoadThemes::JustBase, cx);
- language_model::init_settings(cx);
+ language_model::init(cx);
workspace::register_project_item::(cx);
});
diff --git a/crates/agent_ui/src/agent_panel.rs b/crates/agent_ui/src/agent_panel.rs
index 0ed0aeb78bf8889136a479ed2dac5caba633db55..41900e71e5d3ad7e5327ee7e04f73cb05eed5a5b 100644
--- a/crates/agent_ui/src/agent_panel.rs
+++ b/crates/agent_ui/src/agent_panel.rs
@@ -25,6 +25,7 @@ use zed_actions::agent::{
ResolveConflictsWithAgent, ReviewBranchDiff,
};
+use crate::thread_metadata_store::ThreadMetadataStore;
use crate::{
AddContextServer, AgentDiffPane, ConversationView, CopyThreadToClipboard, CycleStartThreadIn,
Follow, InlineAssistant, LoadThreadFromClipboard, NewThread, OpenActiveThreadAsMarkdown,
@@ -753,28 +754,21 @@ impl AgentPanel {
.as_ref()
.and_then(|p| p.last_active_thread.as_ref())
{
- if thread_info.agent_type.is_native() {
- let session_id = acp::SessionId::new(thread_info.session_id.clone());
- let load_result = cx.update(|_window, cx| {
- let thread_store = ThreadStore::global(cx);
- thread_store.update(cx, |store, cx| store.load_thread(session_id, cx))
- });
- let thread_exists = if let Ok(task) = load_result {
- task.await.ok().flatten().is_some()
- } else {
- false
- };
- if thread_exists {
- Some(thread_info)
- } else {
- log::warn!(
- "last active thread {} not found in database, skipping restoration",
- thread_info.session_id
- );
- None
- }
- } else {
+ let session_id = acp::SessionId::new(thread_info.session_id.clone());
+ let has_metadata = cx
+ .update(|_window, cx| {
+ let store = ThreadMetadataStore::global(cx);
+ store.read(cx).entry(&session_id).is_some()
+ })
+ .unwrap_or(false);
+ if has_metadata {
Some(thread_info)
+ } else {
+ log::warn!(
+ "last active thread {} has no metadata, skipping restoration",
+ thread_info.session_id
+ );
+ None
}
} else {
None
@@ -1734,6 +1728,10 @@ impl AgentPanel {
return;
};
+ if thread_view.read(cx).thread.read(cx).entries().is_empty() {
+ return;
+ }
+
self.background_threads
.insert(thread_view.read(cx).id.clone(), conversation_view);
self.cleanup_background_threads(cx);
@@ -2078,6 +2076,10 @@ impl AgentPanel {
window: &mut Window,
cx: &mut Context,
) {
+ if let Some(store) = ThreadMetadataStore::try_global(cx) {
+ store.update(cx, |store, cx| store.unarchive(&session_id, cx));
+ }
+
if let Some(conversation_view) = self.background_threads.remove(&session_id) {
self.set_active_view(
ActiveView::AgentThread { conversation_view },
@@ -2588,7 +2590,7 @@ impl AgentPanel {
anyhow::Ok(())
});
- self._worktree_creation_task = Some(cx.foreground_executor().spawn(async move {
+ self._worktree_creation_task = Some(cx.background_spawn(async move {
task.await.log_err();
}));
}
@@ -2745,6 +2747,10 @@ impl AgentPanel {
new_window_handle.update(cx, |multi_workspace, window, cx| {
multi_workspace.activate(new_workspace.clone(), window, cx);
+
+ new_workspace.update(cx, |workspace, cx| {
+ workspace.run_create_worktree_tasks(window, cx);
+ })
})?;
this.update_in(cx, |this, window, cx| {
@@ -4297,6 +4303,8 @@ mod tests {
);
});
+ send_message(&panel_a, cx);
+
let agent_type_a = panel_a.read_with(cx, |panel, _cx| panel.selected_agent.clone());
// --- Set up workspace B: ClaudeCode, no active thread ---
@@ -4356,6 +4364,72 @@ mod tests {
});
}
+ #[gpui::test]
+ async fn test_non_native_thread_without_metadata_is_not_restored(cx: &mut TestAppContext) {
+ init_test(cx);
+ cx.update(|cx| {
+ cx.update_flags(true, vec!["agent-v2".to_string()]);
+ agent::ThreadStore::init_global(cx);
+ language_model::LanguageModelRegistry::test(cx);
+ });
+
+ let fs = FakeFs::new(cx.executor());
+ let project = Project::test(fs, [], cx).await;
+
+ let multi_workspace =
+ cx.add_window(|window, cx| MultiWorkspace::test_new(project.clone(), window, cx));
+
+ let workspace = multi_workspace
+ .read_with(cx, |multi_workspace, _cx| {
+ multi_workspace.workspace().clone()
+ })
+ .unwrap();
+
+ workspace.update(cx, |workspace, _cx| {
+ workspace.set_random_database_id();
+ });
+
+ let cx = &mut VisualTestContext::from_window(multi_workspace.into(), cx);
+
+ let panel = workspace.update_in(cx, |workspace, window, cx| {
+ cx.new(|cx| AgentPanel::new(workspace, None, window, cx))
+ });
+
+ panel.update_in(cx, |panel, window, cx| {
+ panel.open_external_thread_with_server(
+ Rc::new(StubAgentServer::default_response()),
+ window,
+ cx,
+ );
+ });
+
+ cx.run_until_parked();
+
+ panel.read_with(cx, |panel, cx| {
+ assert!(
+ panel.active_agent_thread(cx).is_some(),
+ "should have an active thread after connection"
+ );
+ });
+
+ // Serialize without ever sending a message, so no thread metadata exists.
+ panel.update(cx, |panel, cx| panel.serialize(cx));
+ cx.run_until_parked();
+
+ let async_cx = cx.update(|window, cx| window.to_async(cx));
+ let loaded = AgentPanel::load(workspace.downgrade(), async_cx)
+ .await
+ .expect("panel load should succeed");
+ cx.run_until_parked();
+
+ loaded.read_with(cx, |panel, _cx| {
+ assert!(
+ panel.active_conversation_view().is_none(),
+ "thread without metadata should not be restored"
+ );
+ });
+ }
+
/// Extracts the text from a Text content block, panicking if it's not Text.
fn expect_text_block(block: &acp::ContentBlock) -> &str {
match block {
@@ -4698,6 +4772,38 @@ mod tests {
(panel, cx)
}
+ #[gpui::test]
+ async fn test_empty_draft_thread_not_retained_when_navigating_away(cx: &mut TestAppContext) {
+ let (panel, mut cx) = setup_panel(cx).await;
+
+ let connection_a = StubAgentConnection::new();
+ open_thread_with_connection(&panel, connection_a, &mut cx);
+ let session_id_a = active_session_id(&panel, &cx);
+
+ panel.read_with(&cx, |panel, cx| {
+ let thread = panel.active_agent_thread(cx).unwrap();
+ assert!(
+ thread.read(cx).entries().is_empty(),
+ "newly opened draft thread should have no entries"
+ );
+ assert!(panel.background_threads.is_empty());
+ });
+
+ let connection_b = StubAgentConnection::new();
+ open_thread_with_connection(&panel, connection_b, &mut cx);
+
+ panel.read_with(&cx, |panel, _cx| {
+ assert!(
+ panel.background_threads.is_empty(),
+ "empty draft thread should not be retained in background_threads"
+ );
+ assert!(
+ !panel.background_threads.contains_key(&session_id_a),
+ "empty draft thread should not be keyed in background_threads"
+ );
+ });
+ }
+
#[gpui::test]
async fn test_running_thread_retained_when_navigating_away(cx: &mut TestAppContext) {
let (panel, mut cx) = setup_panel(cx).await;
@@ -4809,6 +4915,7 @@ mod tests {
// Open thread B — thread A goes to background.
let connection_b = StubAgentConnection::new();
open_thread_with_connection(&panel, connection_b, &mut cx);
+ send_message(&panel, &mut cx);
let session_id_b = active_session_id(&panel, &cx);
@@ -5068,7 +5175,7 @@ mod tests {
multi_workspace
.read_with(cx, |multi_workspace, _cx| {
assert_eq!(
- multi_workspace.workspaces().len(),
+ multi_workspace.workspaces().count(),
1,
"LocalProject should not create a new workspace"
);
@@ -5344,6 +5451,11 @@ mod tests {
let multi_workspace =
cx.add_window(|window, cx| MultiWorkspace::test_new(project.clone(), window, cx));
+ multi_workspace
+ .update(cx, |multi_workspace, _, cx| {
+ multi_workspace.open_sidebar(cx);
+ })
+ .unwrap();
let workspace = multi_workspace
.read_with(cx, |multi_workspace, _cx| {
@@ -5431,15 +5543,14 @@ mod tests {
.read_with(cx, |multi_workspace, cx| {
// There should be more than one workspace now (the original + the new worktree).
assert!(
- multi_workspace.workspaces().len() > 1,
+ multi_workspace.workspaces().count() > 1,
"expected a new workspace to have been created, found {}",
- multi_workspace.workspaces().len(),
+ multi_workspace.workspaces().count(),
);
// Check the newest workspace's panel for the correct agent.
let new_workspace = multi_workspace
.workspaces()
- .iter()
.find(|ws| ws.entity_id() != workspace.entity_id())
.expect("should find the new workspace");
let new_panel = new_workspace
diff --git a/crates/agent_ui/src/agent_ui.rs b/crates/agent_ui/src/agent_ui.rs
index 98715056ccec43fb91cc4dc9307cf41d84719fc0..5cff5bfc38d4512d659d919c6e7c4ff02fcc0caf 100644
--- a/crates/agent_ui/src/agent_ui.rs
+++ b/crates/agent_ui/src/agent_ui.rs
@@ -173,6 +173,22 @@ actions!(
ToggleThinkingEffortMenu,
/// Toggles fast mode for models that support it.
ToggleFastMode,
+ /// Scroll the output by one page up.
+ ScrollOutputPageUp,
+ /// Scroll the output by one page down.
+ ScrollOutputPageDown,
+ /// Scroll the output up by three lines.
+ ScrollOutputLineUp,
+ /// Scroll the output down by three lines.
+ ScrollOutputLineDown,
+ /// Scroll the output to the top.
+ ScrollOutputToTop,
+ /// Scroll the output to the bottom.
+ ScrollOutputToBottom,
+ /// Scroll the output to the previous user message.
+ ScrollOutputToPreviousMessage,
+ /// Scroll the output to the next user message.
+ ScrollOutputToNextMessage,
]
);
@@ -674,7 +690,9 @@ mod tests {
use feature_flags::FeatureFlagAppExt;
use gpui::{BorrowAppContext, TestAppContext, px};
use project::DisableAiSettings;
- use settings::{DockPosition, NotifyWhenAgentWaiting, Settings, SettingsStore};
+ use settings::{
+ DockPosition, NotifyWhenAgentWaiting, PlaySoundWhenAgentDone, Settings, SettingsStore,
+ };
#[gpui::test]
fn test_agent_command_palette_visibility(cx: &mut TestAppContext) {
@@ -705,7 +723,7 @@ mod tests {
default_profile: AgentProfileId::default(),
profiles: Default::default(),
notify_when_agent_waiting: NotifyWhenAgentWaiting::default(),
- play_sound_when_agent_done: false,
+ play_sound_when_agent_done: PlaySoundWhenAgentDone::Never,
single_file_review: false,
model_parameters: vec![],
enable_feedback: false,
@@ -716,6 +734,7 @@ mod tests {
message_editor_min_lines: 1,
tool_permissions: Default::default(),
show_turn_stats: false,
+ show_merge_conflict_indicator: true,
new_thread_location: Default::default(),
sidebar_side: Default::default(),
thinking_display: Default::default(),
diff --git a/crates/agent_ui/src/buffer_codegen.rs b/crates/agent_ui/src/buffer_codegen.rs
index 420f8665e349c4e79222cdfa034de44971fab538..d5288c564d7211a986fa6347e2b74782c58d9c75 100644
--- a/crates/agent_ui/src/buffer_codegen.rs
+++ b/crates/agent_ui/src/buffer_codegen.rs
@@ -303,7 +303,7 @@ impl CodegenAlternative {
let snapshot = buffer.read(cx).snapshot(cx);
let (old_buffer, _, _) = snapshot
- .range_to_buffer_ranges(range.start..=range.end)
+ .range_to_buffer_ranges(range.start..range.end)
.pop()
.unwrap();
let old_buffer = cx.new(|cx| {
@@ -684,7 +684,7 @@ impl CodegenAlternative {
let language_name = {
let multibuffer = self.buffer.read(cx);
let snapshot = multibuffer.snapshot(cx);
- let ranges = snapshot.range_to_buffer_ranges(self.range.start..=self.range.end);
+ let ranges = snapshot.range_to_buffer_ranges(self.range.start..self.range.end);
ranges
.first()
.and_then(|(buffer, _, _)| buffer.language())
diff --git a/crates/agent_ui/src/completion_provider.rs b/crates/agent_ui/src/completion_provider.rs
index 6259269834b0add5b87fd9d397e17671d30adb9f..47fd7b0295adbcd2ecea768c3bd9e321a5f551b9 100644
--- a/crates/agent_ui/src/completion_provider.rs
+++ b/crates/agent_ui/src/completion_provider.rs
@@ -9,9 +9,7 @@ use crate::ThreadHistory;
use acp_thread::MentionUri;
use agent_client_protocol as acp;
use anyhow::Result;
-use editor::{
- CompletionProvider, Editor, ExcerptId, code_context_menus::COMPLETION_MENU_MAX_WIDTH,
-};
+use editor::{CompletionProvider, Editor, code_context_menus::COMPLETION_MENU_MAX_WIDTH};
use futures::FutureExt as _;
use fuzzy::{PathMatch, StringMatch, StringMatchCandidate};
use gpui::{App, BackgroundExecutor, Entity, SharedString, Task, WeakEntity};
@@ -621,7 +619,7 @@ impl PromptCompletionProvider {
for (terminal_text, terminal_range) in terminal_ranges {
let snapshot = editor.read(cx).buffer().read(cx).snapshot(cx);
let Some(start) =
- snapshot.as_singleton_anchor(source_range.start)
+ snapshot.anchor_in_excerpt(source_range.start)
else {
return;
};
@@ -1235,7 +1233,6 @@ impl PromptCompletionProvider {
impl CompletionProvider for PromptCompletionProvider {
fn completions(
&self,
- _excerpt_id: ExcerptId,
buffer: &Entity,
buffer_position: Anchor,
_trigger: CompletionContext,
@@ -2147,7 +2144,7 @@ fn build_code_label_for_path(
.theme()
.syntax()
.highlight_id("variable")
- .map(HighlightId);
+ .map(HighlightId::new);
let mut label = CodeLabelBuilder::default();
label.push_str(file, None);
diff --git a/crates/agent_ui/src/conversation_view.rs b/crates/agent_ui/src/conversation_view.rs
index 2231f421bc2af0d8038c002a72c226f551f243cc..7c9acfdf27d5b750afe4b8817af7f657f5fcdecc 100644
--- a/crates/agent_ui/src/conversation_view.rs
+++ b/crates/agent_ui/src/conversation_view.rs
@@ -85,8 +85,11 @@ use crate::{
AuthorizeToolCall, ClearMessageQueue, CycleFavoriteModels, CycleModeSelector,
CycleThinkingEffort, EditFirstQueuedMessage, ExpandMessageEditor, Follow, KeepAll, NewThread,
OpenAddContextMenu, OpenAgentDiff, OpenHistory, RejectAll, RejectOnce,
- RemoveFirstQueuedMessage, SendImmediately, SendNextQueuedMessage, ToggleFastMode,
- ToggleProfileSelector, ToggleThinkingEffortMenu, ToggleThinkingMode, UndoLastReject,
+ RemoveFirstQueuedMessage, ScrollOutputLineDown, ScrollOutputLineUp, ScrollOutputPageDown,
+ ScrollOutputPageUp, ScrollOutputToBottom, ScrollOutputToNextMessage,
+ ScrollOutputToPreviousMessage, ScrollOutputToTop, SendImmediately, SendNextQueuedMessage,
+ ToggleFastMode, ToggleProfileSelector, ToggleThinkingEffortMenu, ToggleThinkingMode,
+ UndoLastReject,
};
const STOPWATCH_THRESHOLD: Duration = Duration::from_secs(30);
@@ -809,7 +812,7 @@ impl ConversationView {
let agent_id = self.agent.agent_id();
let session_capabilities = Arc::new(RwLock::new(SessionCapabilities::new(
thread.read(cx).prompt_capabilities(),
- vec![],
+ thread.read(cx).available_commands().to_vec(),
)));
let action_log = thread.read(cx).action_log().clone();
@@ -828,6 +831,8 @@ impl ConversationView {
let count = thread.read(cx).entries().len();
let list_state = ListState::new(0, gpui::ListAlignment::Top, px(2048.0));
+ list_state.set_follow_mode(gpui::FollowMode::Tail);
+
entry_view_state.update(cx, |view_state, cx| {
for ix in 0..count {
view_state.sync_entry(ix, &thread, window, cx);
@@ -841,7 +846,7 @@ impl ConversationView {
if let Some(scroll_position) = thread.read(cx).ui_scroll_position() {
list_state.scroll_to(scroll_position);
} else {
- list_state.set_follow_tail(true);
+ list_state.scroll_to_end();
}
AgentDiff::set_active_thread(&self.workspace, thread.clone(), window, cx);
@@ -1257,9 +1262,11 @@ impl ConversationView {
AcpThreadEvent::EntryUpdated(index) => {
if let Some(active) = self.thread_view(&thread_id) {
let entry_view_state = active.read(cx).entry_view_state.clone();
+ let list_state = active.read(cx).list_state.clone();
entry_view_state.update(cx, |view_state, cx| {
- view_state.sync_entry(*index, thread, window, cx)
+ view_state.sync_entry(*index, thread, window, cx);
});
+ list_state.remeasure_items(*index..*index + 1);
active.update(cx, |active, cx| {
active.auto_expand_streaming_thought(cx);
});
@@ -1295,10 +1302,16 @@ impl ConversationView {
}
AcpThreadEvent::Stopped(stop_reason) => {
if let Some(active) = self.thread_view(&thread_id) {
+ let is_generating =
+ matches!(thread.read(cx).status(), ThreadStatus::Generating);
active.update(cx, |active, cx| {
- active.thread_retry_status.take();
- active.clear_auto_expand_tracking();
- active.list_state.set_follow_tail(false);
+ if !is_generating {
+ active.thread_retry_status.take();
+ active.clear_auto_expand_tracking();
+ if active.list_state.is_following_tail() {
+ active.list_state.scroll_to_end();
+ }
+ }
active.sync_generating_indicator(cx);
});
}
@@ -1367,9 +1380,15 @@ impl ConversationView {
}
AcpThreadEvent::Error => {
if let Some(active) = self.thread_view(&thread_id) {
+ let is_generating =
+ matches!(thread.read(cx).status(), ThreadStatus::Generating);
active.update(cx, |active, cx| {
- active.thread_retry_status.take();
- active.list_state.set_follow_tail(false);
+ if !is_generating {
+ active.thread_retry_status.take();
+ if active.list_state.is_following_tail() {
+ active.list_state.scroll_to_end();
+ }
+ }
active.sync_generating_indicator(cx);
});
}
@@ -1429,40 +1448,24 @@ impl ConversationView {
self.emit_token_limit_telemetry_if_needed(thread, cx);
}
AcpThreadEvent::AvailableCommandsUpdated(available_commands) => {
- let mut available_commands = available_commands.clone();
-
- if thread
- .read(cx)
- .connection()
- .auth_methods()
- .iter()
- .any(|method| method.id().0.as_ref() == "claude-login")
- {
- available_commands.push(acp::AvailableCommand::new("login", "Authenticate"));
- available_commands.push(acp::AvailableCommand::new("logout", "Authenticate"));
- }
+ if let Some(thread_view) = self.thread_view(&thread_id) {
+ let has_commands = !available_commands.is_empty();
- let has_commands = !available_commands.is_empty();
- if let Some(active) = self.active_thread() {
- active.update(cx, |active, _cx| {
- active
- .session_capabilities
- .write()
- .set_available_commands(available_commands);
- });
- }
-
- let agent_display_name = self
- .agent_server_store
- .read(cx)
- .agent_display_name(&self.agent.agent_id())
- .unwrap_or_else(|| self.agent.agent_id().0.to_string().into());
+ let agent_display_name = self
+ .agent_server_store
+ .read(cx)
+ .agent_display_name(&self.agent.agent_id())
+ .unwrap_or_else(|| self.agent.agent_id().0.to_string().into());
- if let Some(active) = self.active_thread() {
let new_placeholder =
placeholder_text(agent_display_name.as_ref(), has_commands);
- active.update(cx, |active, cx| {
- active.message_editor.update(cx, |editor, cx| {
+
+ thread_view.update(cx, |thread_view, cx| {
+ thread_view
+ .session_capabilities
+ .write()
+ .set_available_commands(available_commands.clone());
+ thread_view.message_editor.update(cx, |editor, cx| {
editor.set_placeholder_text(&new_placeholder, window, cx);
});
});
@@ -2329,9 +2332,9 @@ impl ConversationView {
}
}
+ #[cfg(feature = "audio")]
fn play_notification_sound(&self, window: &Window, cx: &mut App) {
- let settings = AgentSettings::get_global(cx);
- let _visible = window.is_window_active()
+ let visible = window.is_window_active()
&& if let Some(mw) = window.root::().flatten() {
self.agent_panel_visible(&mw, cx)
} else {
@@ -2339,8 +2342,8 @@ impl ConversationView {
.upgrade()
.is_some_and(|workspace| AgentPanel::is_visible(&workspace, cx))
};
- #[cfg(feature = "audio")]
- if settings.play_sound_when_agent_done && !_visible {
+ let settings = AgentSettings::get_global(cx);
+ if settings.play_sound_when_agent_done.should_play(visible) {
Audio::play_sound(Sound::AgentDone, cx);
}
}
@@ -2970,6 +2973,166 @@ pub(crate) mod tests {
});
}
+ #[derive(Clone)]
+ struct RestoredAvailableCommandsConnection;
+
+ impl AgentConnection for RestoredAvailableCommandsConnection {
+ fn agent_id(&self) -> AgentId {
+ AgentId::new("restored-available-commands")
+ }
+
+ fn telemetry_id(&self) -> SharedString {
+ "restored-available-commands".into()
+ }
+
+ fn new_session(
+ self: Rc,
+ project: Entity,
+ _work_dirs: PathList,
+ cx: &mut App,
+ ) -> Task>> {
+ let thread = build_test_thread(
+ self,
+ project,
+ "RestoredAvailableCommandsConnection",
+ SessionId::new("new-session"),
+ cx,
+ );
+ Task::ready(Ok(thread))
+ }
+
+ fn supports_load_session(&self) -> bool {
+ true
+ }
+
+ fn load_session(
+ self: Rc,
+ session_id: acp::SessionId,
+ project: Entity,
+ _work_dirs: PathList,
+ _title: Option,
+ cx: &mut App,
+ ) -> Task>> {
+ let thread = build_test_thread(
+ self,
+ project,
+ "RestoredAvailableCommandsConnection",
+ session_id,
+ cx,
+ );
+
+ thread
+ .update(cx, |thread, cx| {
+ thread.handle_session_update(
+ acp::SessionUpdate::AvailableCommandsUpdate(
+ acp::AvailableCommandsUpdate::new(vec![acp::AvailableCommand::new(
+ "help", "Get help",
+ )]),
+ ),
+ cx,
+ )
+ })
+ .expect("available commands update should succeed");
+
+ Task::ready(Ok(thread))
+ }
+
+ fn auth_methods(&self) -> &[acp::AuthMethod] {
+ &[]
+ }
+
+ fn authenticate(
+ &self,
+ _method_id: acp::AuthMethodId,
+ _cx: &mut App,
+ ) -> Task> {
+ Task::ready(Ok(()))
+ }
+
+ fn prompt(
+ &self,
+ _id: Option,
+ _params: acp::PromptRequest,
+ _cx: &mut App,
+ ) -> Task> {
+ Task::ready(Ok(acp::PromptResponse::new(acp::StopReason::EndTurn)))
+ }
+
+ fn cancel(&self, _session_id: &acp::SessionId, _cx: &mut App) {}
+
+ fn into_any(self: Rc) -> Rc {
+ self
+ }
+ }
+
+ #[gpui::test]
+ async fn test_restored_threads_keep_available_commands(cx: &mut TestAppContext) {
+ init_test(cx);
+
+ let fs = FakeFs::new(cx.executor());
+ let project = Project::test(fs, [], cx).await;
+ let (multi_workspace, cx) =
+ cx.add_window_view(|window, cx| MultiWorkspace::test_new(project.clone(), window, cx));
+ let workspace = multi_workspace.read_with(cx, |mw, _| mw.workspace().clone());
+
+ let thread_store = cx.update(|_window, cx| cx.new(|cx| ThreadStore::new(cx)));
+ let connection_store =
+ cx.update(|_window, cx| cx.new(|cx| AgentConnectionStore::new(project.clone(), cx)));
+
+ let conversation_view = cx.update(|window, cx| {
+ cx.new(|cx| {
+ ConversationView::new(
+ Rc::new(StubAgentServer::new(RestoredAvailableCommandsConnection)),
+ connection_store,
+ Agent::Custom { id: "Test".into() },
+ Some(SessionId::new("restored-session")),
+ None,
+ None,
+ None,
+ workspace.downgrade(),
+ project,
+ Some(thread_store),
+ None,
+ window,
+ cx,
+ )
+ })
+ });
+
+ cx.run_until_parked();
+
+ let message_editor = message_editor(&conversation_view, cx);
+ let editor =
+ message_editor.update(cx, |message_editor, _cx| message_editor.editor().clone());
+ let placeholder = editor.update(cx, |editor, cx| editor.placeholder_text(cx));
+
+ active_thread(&conversation_view, cx).read_with(cx, |view, _cx| {
+ let available_commands = view
+ .session_capabilities
+ .read()
+ .available_commands()
+ .to_vec();
+ assert_eq!(available_commands.len(), 1);
+ assert_eq!(available_commands[0].name.as_str(), "help");
+ assert_eq!(available_commands[0].description.as_str(), "Get help");
+ });
+
+ assert_eq!(
+ placeholder,
+ Some("Message Test — @ to include context, / for commands".to_string())
+ );
+
+ message_editor.update_in(cx, |editor, window, cx| {
+ editor.set_text("/help", window, cx);
+ });
+
+ let contents_result = message_editor
+ .update(cx, |editor, cx| editor.contents(false, cx))
+ .await;
+
+ assert!(contents_result.is_ok());
+ }
+
#[gpui::test]
async fn test_resume_thread_uses_session_cwd_when_inside_project(cx: &mut TestAppContext) {
init_test(cx);
@@ -3356,7 +3519,6 @@ pub(crate) mod tests {
// Verify workspace1 is no longer the active workspace
multi_workspace_handle
.read_with(cx, |mw, _cx| {
- assert_eq!(mw.active_workspace_index(), 1);
assert_ne!(mw.workspace(), &workspace1);
})
.unwrap();
@@ -4851,6 +5013,63 @@ pub(crate) mod tests {
});
}
+ #[gpui::test]
+ async fn test_stale_stop_does_not_disable_follow_tail_during_regenerate(
+ cx: &mut TestAppContext,
+ ) {
+ init_test(cx);
+
+ let connection = StubAgentConnection::new();
+
+ let (conversation_view, cx) =
+ setup_conversation_view(StubAgentServer::new(connection.clone()), cx).await;
+ add_to_workspace(conversation_view.clone(), cx);
+
+ let message_editor = message_editor(&conversation_view, cx);
+ message_editor.update_in(cx, |editor, window, cx| {
+ editor.set_text("Original message to edit", window, cx);
+ });
+ active_thread(&conversation_view, cx)
+ .update_in(cx, |view, window, cx| view.send(window, cx));
+
+ cx.run_until_parked();
+
+ let user_message_editor = conversation_view.read_with(cx, |view, cx| {
+ view.active_thread()
+ .map(|active| &active.read(cx).entry_view_state)
+ .as_ref()
+ .unwrap()
+ .read(cx)
+ .entry(0)
+ .unwrap()
+ .message_editor()
+ .unwrap()
+ .clone()
+ });
+
+ cx.focus(&user_message_editor);
+ user_message_editor.update_in(cx, |editor, window, cx| {
+ editor.set_text("Edited message content", window, cx);
+ });
+
+ user_message_editor.update_in(cx, |_editor, window, cx| {
+ window.dispatch_action(Box::new(Chat), cx);
+ });
+
+ cx.run_until_parked();
+
+ conversation_view.read_with(cx, |view, cx| {
+ let active = view.active_thread().unwrap();
+ let active = active.read(cx);
+
+ assert_eq!(active.thread.read(cx).status(), ThreadStatus::Generating);
+ assert!(
+ active.list_state.is_following_tail(),
+ "stale stop events from the cancelled turn must not disable follow-tail for the new turn"
+ );
+ });
+ }
+
struct GeneratingThreadSetup {
conversation_view: Entity,
thread: Entity,
diff --git a/crates/agent_ui/src/conversation_view/thread_view.rs b/crates/agent_ui/src/conversation_view/thread_view.rs
index b25769eadbe31c35a6261cc9433349a2943617be..685621eb3c93632f1e7410bbbad22b623d5e18c7 100644
--- a/crates/agent_ui/src/conversation_view/thread_view.rs
+++ b/crates/agent_ui/src/conversation_view/thread_view.rs
@@ -344,7 +344,8 @@ impl ThreadView {
) -> Self {
let id = thread.read(cx).session_id().clone();
- let placeholder = placeholder_text(agent_display_name.as_ref(), false);
+ let has_commands = !session_capabilities.read().available_commands().is_empty();
+ let placeholder = placeholder_text(agent_display_name.as_ref(), has_commands);
let history_subscription = history.as_ref().map(|h| {
cx.observe(h, |this, history, cx| {
@@ -541,31 +542,15 @@ impl ThreadView {
let thread_view = cx.entity().downgrade();
this.list_state
- .set_scroll_handler(move |event, _window, cx| {
+ .set_scroll_handler(move |_event, _window, cx| {
let list_state = list_state_for_scroll.clone();
let thread_view = thread_view.clone();
- let is_following_tail = event.is_following_tail;
// N.B. We must defer because the scroll handler is called while the
// ListState's RefCell is mutably borrowed. Reading logical_scroll_top()
// directly would panic from a double borrow.
cx.defer(move |cx| {
let scroll_top = list_state.logical_scroll_top();
let _ = thread_view.update(cx, |this, cx| {
- if !is_following_tail {
- let is_at_bottom = {
- let current_offset =
- list_state.scroll_px_offset_for_scrollbar().y.abs();
- let max_offset = list_state.max_offset_for_scrollbar().y;
- current_offset >= max_offset - px(1.0)
- };
-
- let is_generating =
- matches!(this.thread.read(cx).status(), ThreadStatus::Generating);
-
- if is_at_bottom && is_generating {
- list_state.set_follow_tail(true);
- }
- }
if let Some(thread) = this.as_native_thread(cx) {
thread.update(cx, |thread, _cx| {
thread.set_ui_scroll_position(Some(scroll_top));
@@ -832,13 +817,10 @@ impl ThreadView {
}
}
}));
- if self.parent_id.is_none() {
- self.suppress_merge_conflict_notification(cx);
- }
generation
}
- pub fn stop_turn(&mut self, generation: usize, cx: &mut Context) {
+ pub fn stop_turn(&mut self, generation: usize, _cx: &mut Context) {
if self.turn_fields.turn_generation != generation {
return;
}
@@ -849,25 +831,6 @@ impl ThreadView {
.map(|started| started.elapsed());
self.turn_fields.last_turn_tokens = self.turn_fields.turn_tokens.take();
self.turn_fields._turn_timer_task = None;
- if self.parent_id.is_none() {
- self.unsuppress_merge_conflict_notification(cx);
- }
- }
-
- fn suppress_merge_conflict_notification(&self, cx: &mut Context) {
- self.workspace
- .update(cx, |workspace, cx| {
- workspace.suppress_notification(&workspace::merge_conflict_notification_id(), cx);
- })
- .ok();
- }
-
- fn unsuppress_merge_conflict_notification(&self, cx: &mut Context) {
- self.workspace
- .update(cx, |workspace, _cx| {
- workspace.unsuppress(workspace::merge_conflict_notification_id());
- })
- .ok();
}
pub fn update_turn_tokens(&mut self, cx: &App) {
@@ -1077,7 +1040,7 @@ impl ThreadView {
})?;
let _ = this.update(cx, |this, cx| {
- this.list_state.set_follow_tail(true);
+ this.list_state.scroll_to_end();
cx.notify();
});
@@ -4978,6 +4941,105 @@ impl ThreadView {
cx.notify();
}
+ fn scroll_output_page_up(
+ &mut self,
+ _: &ScrollOutputPageUp,
+ _window: &mut Window,
+ cx: &mut Context,
+ ) {
+ let page_height = self.list_state.viewport_bounds().size.height;
+ self.list_state.scroll_by(-page_height * 0.9);
+ cx.notify();
+ }
+
+ fn scroll_output_page_down(
+ &mut self,
+ _: &ScrollOutputPageDown,
+ _window: &mut Window,
+ cx: &mut Context,
+ ) {
+ let page_height = self.list_state.viewport_bounds().size.height;
+ self.list_state.scroll_by(page_height * 0.9);
+ cx.notify();
+ }
+
+ fn scroll_output_line_up(
+ &mut self,
+ _: &ScrollOutputLineUp,
+ window: &mut Window,
+ cx: &mut Context,
+ ) {
+ self.list_state.scroll_by(-window.line_height() * 3.);
+ cx.notify();
+ }
+
+ fn scroll_output_line_down(
+ &mut self,
+ _: &ScrollOutputLineDown,
+ window: &mut Window,
+ cx: &mut Context,
+ ) {
+ self.list_state.scroll_by(window.line_height() * 3.);
+ cx.notify();
+ }
+
+ fn scroll_output_to_top(
+ &mut self,
+ _: &ScrollOutputToTop,
+ _window: &mut Window,
+ cx: &mut Context,
+ ) {
+ self.scroll_to_top(cx);
+ }
+
+ fn scroll_output_to_bottom(
+ &mut self,
+ _: &ScrollOutputToBottom,
+ _window: &mut Window,
+ cx: &mut Context,
+ ) {
+ self.scroll_to_end(cx);
+ }
+
+ fn scroll_output_to_previous_message(
+ &mut self,
+ _: &ScrollOutputToPreviousMessage,
+ _window: &mut Window,
+ cx: &mut Context,
+ ) {
+ let entries = self.thread.read(cx).entries();
+ let current_ix = self.list_state.logical_scroll_top().item_ix;
+ if let Some(target_ix) = (0..current_ix)
+ .rev()
+ .find(|&i| matches!(entries.get(i), Some(AgentThreadEntry::UserMessage(_))))
+ {
+ self.list_state.scroll_to(ListOffset {
+ item_ix: target_ix,
+ offset_in_item: px(0.),
+ });
+ cx.notify();
+ }
+ }
+
+ fn scroll_output_to_next_message(
+ &mut self,
+ _: &ScrollOutputToNextMessage,
+ _window: &mut Window,
+ cx: &mut Context,
+ ) {
+ let entries = self.thread.read(cx).entries();
+ let current_ix = self.list_state.logical_scroll_top().item_ix;
+ if let Some(target_ix) = (current_ix + 1..entries.len())
+ .find(|&i| matches!(entries.get(i), Some(AgentThreadEntry::UserMessage(_))))
+ {
+ self.list_state.scroll_to(ListOffset {
+ item_ix: target_ix,
+ offset_in_item: px(0.),
+ });
+ cx.notify();
+ }
+ }
+
pub fn open_thread_as_markdown(
&self,
workspace: Entity,
@@ -5207,9 +5269,12 @@ impl ThreadView {
match thinking_display {
ThinkingBlockDisplay::Auto => {
- if self.expanded_thinking_blocks.contains(&key) {
+ let is_open = self.expanded_thinking_blocks.contains(&key)
+ || self.user_toggled_thinking_blocks.contains(&key);
+
+ if is_open {
self.expanded_thinking_blocks.remove(&key);
- self.user_toggled_thinking_blocks.insert(key);
+ self.user_toggled_thinking_blocks.remove(&key);
} else {
self.expanded_thinking_blocks.insert(key);
self.user_toggled_thinking_blocks.insert(key);
@@ -7126,17 +7191,10 @@ impl ThreadView {
};
active_editor.update_in(cx, |editor, window, cx| {
- let singleton = editor
- .buffer()
- .read(cx)
- .read(cx)
- .as_singleton()
- .map(|(a, b, _)| (a, b));
- if let Some((excerpt_id, buffer_id)) = singleton
- && let Some(agent_buffer) = agent_location.buffer.upgrade()
- && agent_buffer.read(cx).remote_id() == buffer_id
+ let snapshot = editor.buffer().read(cx).snapshot(cx);
+ if snapshot.as_singleton().is_some()
+ && let Some(anchor) = snapshot.anchor_in_excerpt(agent_location.position)
{
- let anchor = editor::Anchor::in_buffer(excerpt_id, agent_location.position);
editor.change_selections(Default::default(), window, cx, |selections| {
selections.select_anchor_ranges([anchor..anchor]);
})
@@ -7332,9 +7390,8 @@ impl ThreadView {
.gap_2()
.map(|this| {
if card_layout {
- this.when(context_ix > 0, |this| {
- this.pt_2()
- .border_t_1()
+ this.p_2().when(context_ix > 0, |this| {
+ this.border_t_1()
.border_color(self.tool_card_border_color(cx))
})
} else {
@@ -8545,6 +8602,14 @@ impl Render for ThreadView {
.on_action(cx.listener(Self::handle_toggle_command_pattern))
.on_action(cx.listener(Self::open_permission_dropdown))
.on_action(cx.listener(Self::open_add_context_menu))
+ .on_action(cx.listener(Self::scroll_output_page_up))
+ .on_action(cx.listener(Self::scroll_output_page_down))
+ .on_action(cx.listener(Self::scroll_output_line_up))
+ .on_action(cx.listener(Self::scroll_output_line_down))
+ .on_action(cx.listener(Self::scroll_output_to_top))
+ .on_action(cx.listener(Self::scroll_output_to_bottom))
+ .on_action(cx.listener(Self::scroll_output_to_previous_message))
+ .on_action(cx.listener(Self::scroll_output_to_next_message))
.on_action(cx.listener(|this, _: &ToggleFastMode, _window, cx| {
this.toggle_fast_mode(cx);
}))
@@ -8732,7 +8797,7 @@ pub(crate) fn open_link(
.open_path(path, None, true, window, cx)
.detach_and_log_err(cx);
}
- MentionUri::PastedImage => {}
+ MentionUri::PastedImage { .. } => {}
MentionUri::Directory { abs_path } => {
let project = workspace.project();
let Some(entry_id) = project.update(cx, |project, cx| {
diff --git a/crates/agent_ui/src/inline_assistant.rs b/crates/agent_ui/src/inline_assistant.rs
index 01543b657fc2d00fbf8c68cd96c6329d2f4952d6..39d70790e0d4a18554b2a1c11510e529d921cd1b 100644
--- a/crates/agent_ui/src/inline_assistant.rs
+++ b/crates/agent_ui/src/inline_assistant.rs
@@ -27,8 +27,8 @@ use editor::RowExt;
use editor::SelectionEffects;
use editor::scroll::ScrollOffset;
use editor::{
- Anchor, AnchorRangeExt, CodeActionProvider, Editor, EditorEvent, ExcerptId, HighlightKey,
- MultiBuffer, MultiBufferSnapshot, ToOffset as _, ToPoint,
+ Anchor, AnchorRangeExt, CodeActionProvider, Editor, EditorEvent, HighlightKey, MultiBuffer,
+ MultiBufferSnapshot, ToOffset as _, ToPoint,
actions::SelectAll,
display_map::{
BlockContext, BlockPlacement, BlockProperties, BlockStyle, CustomBlockId, EditorMargins,
@@ -443,15 +443,17 @@ impl InlineAssistant {
let newest_selection = newest_selection.unwrap();
let mut codegen_ranges = Vec::new();
- for (buffer, buffer_range, excerpt_id) in
- snapshot.ranges_to_buffer_ranges(selections.iter().map(|selection| {
- snapshot.anchor_before(selection.start)..snapshot.anchor_after(selection.end)
- }))
+ for (buffer, buffer_range, _) in selections
+ .iter()
+ .flat_map(|selection| snapshot.range_to_buffer_ranges(selection.start..selection.end))
{
- let anchor_range = Anchor::range_in_buffer(
- excerpt_id,
- buffer.anchor_before(buffer_range.start)..buffer.anchor_after(buffer_range.end),
- );
+ let (Some(start), Some(end)) = (
+ snapshot.anchor_in_buffer(buffer.anchor_before(buffer_range.start)),
+ snapshot.anchor_in_buffer(buffer.anchor_after(buffer_range.end)),
+ ) else {
+ continue;
+ };
+ let anchor_range = start..end;
codegen_ranges.push(anchor_range);
@@ -982,8 +984,7 @@ impl InlineAssistant {
match event {
EditorEvent::Edited { transaction_id } => {
let buffer = editor.read(cx).buffer().read(cx);
- let edited_ranges =
- buffer.edited_ranges_for_transaction::(*transaction_id, cx);
+ let edited_ranges = buffer.edited_ranges_for_transaction(*transaction_id, cx);
let snapshot = buffer.snapshot(cx);
for assist_id in editor_assists.assist_ids.clone() {
@@ -1089,7 +1090,7 @@ impl InlineAssistant {
let multibuffer = editor.read(cx).buffer().read(cx);
let snapshot = multibuffer.snapshot(cx);
let ranges =
- snapshot.range_to_buffer_ranges(assist.range.start..=assist.range.end);
+ snapshot.range_to_buffer_ranges(assist.range.start..assist.range.end);
ranges
.first()
.and_then(|(buffer, _, _)| buffer.language())
@@ -1496,10 +1497,10 @@ impl InlineAssistant {
let mut new_blocks = Vec::new();
for (new_row, old_row_range) in deleted_row_ranges {
- let (_, start, _) = old_snapshot
+ let (_, start) = old_snapshot
.point_to_buffer_point(Point::new(*old_row_range.start(), 0))
.unwrap();
- let (_, end, _) = old_snapshot
+ let (_, end) = old_snapshot
.point_to_buffer_point(Point::new(
*old_row_range.end(),
old_snapshot.line_len(MultiBufferRow(*old_row_range.end())),
@@ -1530,7 +1531,7 @@ impl InlineAssistant {
editor.set_read_only(true);
editor.set_show_edit_predictions(Some(false), window, cx);
editor.highlight_rows::(
- Anchor::min()..Anchor::max(),
+ Anchor::Min..Anchor::Max,
cx.theme().status().deleted_background,
Default::default(),
cx,
@@ -1938,9 +1939,8 @@ impl CodeActionProvider for AssistantCodeActionProvider {
fn apply_code_action(
&self,
- buffer: Entity,
+ _buffer: Entity,
action: CodeAction,
- excerpt_id: ExcerptId,
_push_to_history: bool,
window: &mut Window,
cx: &mut App,
@@ -1970,31 +1970,8 @@ impl CodeActionProvider for AssistantCodeActionProvider {
let range = editor
.update(cx, |editor, cx| {
editor.buffer().update(cx, |multibuffer, cx| {
- let buffer = buffer.read(cx);
- let multibuffer_snapshot = multibuffer.read(cx);
-
- let old_context_range =
- multibuffer_snapshot.context_range_for_excerpt(excerpt_id)?;
- let mut new_context_range = old_context_range.clone();
- if action
- .range
- .start
- .cmp(&old_context_range.start, buffer)
- .is_lt()
- {
- new_context_range.start = action.range.start;
- }
- if action.range.end.cmp(&old_context_range.end, buffer).is_gt() {
- new_context_range.end = action.range.end;
- }
- drop(multibuffer_snapshot);
-
- if new_context_range != old_context_range {
- multibuffer.resize_excerpt(excerpt_id, new_context_range, cx);
- }
-
let multibuffer_snapshot = multibuffer.read(cx);
- multibuffer_snapshot.anchor_range_in_excerpt(excerpt_id, action.range)
+ multibuffer_snapshot.buffer_anchor_range_to_anchor_range(action.range)
})
})
.context("invalid range")?;
@@ -2048,7 +2025,7 @@ fn merge_ranges(ranges: &mut Vec>, buffer: &MultiBufferSnapshot) {
pub mod evals {
use crate::InlineAssistant;
use agent::ThreadStore;
- use client::{Client, UserStore};
+ use client::{Client, RefreshLlmTokenListener, UserStore};
use editor::{Editor, MultiBuffer, MultiBufferOffset};
use eval_utils::{EvalOutput, NoProcessor};
use fs::FakeFs;
@@ -2114,7 +2091,8 @@ pub mod evals {
client::init(&client, cx);
workspace::init(app_state.clone(), cx);
let user_store = cx.new(|cx| UserStore::new(client.clone(), cx));
- language_model::init(user_store.clone(), client.clone(), cx);
+ language_model::init(cx);
+ RefreshLlmTokenListener::register(client.clone(), user_store.clone(), cx);
language_models::init(user_store, client.clone(), cx);
cx.set_global(inline_assistant);
diff --git a/crates/agent_ui/src/mention_set.rs b/crates/agent_ui/src/mention_set.rs
index 2559edc566d4467eaaab180e0a16f4af5fae7ab9..1b2ec0ad2fd460b4eec5a8b757bdd3058d4a3704 100644
--- a/crates/agent_ui/src/mention_set.rs
+++ b/crates/agent_ui/src/mention_set.rs
@@ -6,7 +6,7 @@ use agent_servers::{AgentServer, AgentServerDelegate};
use anyhow::{Context as _, Result, anyhow};
use collections::{HashMap, HashSet};
use editor::{
- Anchor, Editor, EditorSnapshot, ExcerptId, FoldPlaceholder, ToOffset,
+ Anchor, Editor, EditorSnapshot, FoldPlaceholder, ToOffset,
display_map::{Crease, CreaseId, CreaseMetadata, FoldId},
scroll::Autoscroll,
};
@@ -154,7 +154,7 @@ impl MentionSet {
MentionUri::Selection { abs_path: None, .. } => Task::ready(Err(anyhow!(
"Untitled buffer selection mentions are not supported for paste"
))),
- MentionUri::PastedImage
+ MentionUri::PastedImage { .. }
| MentionUri::TerminalSelection { .. }
| MentionUri::MergeConflict { .. } => {
Task::ready(Err(anyhow!("Unsupported mention URI type for paste")))
@@ -204,10 +204,9 @@ impl MentionSet {
};
let snapshot = editor.update(cx, |editor, cx| editor.snapshot(window, cx));
- let Some(start_anchor) = snapshot.buffer_snapshot().as_singleton_anchor(start) else {
+ let Some(start_anchor) = snapshot.buffer_snapshot().anchor_in_excerpt(start) else {
return Task::ready(());
};
- let excerpt_id = start_anchor.excerpt_id;
let end_anchor = snapshot.buffer_snapshot().anchor_before(
start_anchor.to_offset(&snapshot.buffer_snapshot()) + content_len + 1usize,
);
@@ -234,7 +233,6 @@ impl MentionSet {
})
.shared();
insert_crease_for_mention(
- excerpt_id,
start,
content_len,
mention_uri.name().into(),
@@ -249,7 +247,6 @@ impl MentionSet {
)
} else {
insert_crease_for_mention(
- excerpt_id,
start,
content_len,
crease_text,
@@ -286,7 +283,7 @@ impl MentionSet {
include_errors,
include_warnings,
} => self.confirm_mention_for_diagnostics(include_errors, include_warnings, cx),
- MentionUri::PastedImage => {
+ MentionUri::PastedImage { .. } => {
debug_panic!("pasted image URI should not be included in completions");
Task::ready(Err(anyhow!(
"pasted imaged URI should not be included in completions"
@@ -468,7 +465,7 @@ impl MentionSet {
};
let snapshot = editor.read(cx).buffer().read(cx).snapshot(cx);
- let Some(start) = snapshot.as_singleton_anchor(source_range.start) else {
+ let Some(start) = snapshot.anchor_in_excerpt(source_range.start) else {
return;
};
@@ -742,22 +739,22 @@ pub(crate) async fn insert_images_as_context(
return;
}
- let replacement_text = MentionUri::PastedImage.as_link().to_string();
-
for (image, name) in images {
- let Some((excerpt_id, text_anchor, multibuffer_anchor)) = editor
+ let mention_uri = MentionUri::PastedImage {
+ name: name.to_string(),
+ };
+ let replacement_text = mention_uri.as_link().to_string();
+ let Some((text_anchor, multibuffer_anchor)) = editor
.update_in(cx, |editor, window, cx| {
let snapshot = editor.snapshot(window, cx);
- let (excerpt_id, _, buffer_snapshot) =
- snapshot.buffer_snapshot().as_singleton().unwrap();
-
- let cursor_anchor = editor.selections.newest_anchor().start.text_anchor;
- let text_anchor = cursor_anchor.bias_left(&buffer_snapshot);
- let multibuffer_anchor = snapshot
+ let (cursor_anchor, buffer_snapshot) = snapshot
.buffer_snapshot()
- .anchor_in_excerpt(excerpt_id, text_anchor);
+ .anchor_to_buffer_anchor(editor.selections.newest_anchor().start)
+ .unwrap();
+ let text_anchor = cursor_anchor.bias_left(buffer_snapshot);
+ let multibuffer_anchor = snapshot.buffer_snapshot().anchor_in_excerpt(text_anchor);
editor.insert(&format!("{replacement_text} "), window, cx);
- (excerpt_id, text_anchor, multibuffer_anchor)
+ (text_anchor, multibuffer_anchor)
})
.ok()
else {
@@ -775,7 +772,6 @@ pub(crate) async fn insert_images_as_context(
let image = Arc::new(image);
let Ok(Some((crease_id, tx))) = cx.update(|window, cx| {
insert_crease_for_mention(
- excerpt_id,
text_anchor,
content_len,
name.clone(),
@@ -810,7 +806,13 @@ pub(crate) async fn insert_images_as_context(
.shared();
mention_set.update(cx, |mention_set, _cx| {
- mention_set.insert_mention(crease_id, MentionUri::PastedImage, task.clone())
+ mention_set.insert_mention(
+ crease_id,
+ MentionUri::PastedImage {
+ name: name.to_string(),
+ },
+ task.clone(),
+ )
});
if task
@@ -879,7 +881,7 @@ pub(crate) fn paste_images_as_context(
Some(window.spawn(cx, async move |mut cx| {
use itertools::Itertools;
- let default_name: SharedString = MentionUri::PastedImage.name().into();
+ let default_name: SharedString = "Image".into();
let (mut images, paths): (Vec<(gpui::Image, SharedString)>, Vec<_>) = clipboard
.into_entries()
.filter_map(|entry| match entry {
@@ -909,7 +911,6 @@ pub(crate) fn paste_images_as_context(
}
pub(crate) fn insert_crease_for_mention(
- excerpt_id: ExcerptId,
anchor: text::Anchor,
content_len: usize,
crease_label: SharedString,
@@ -927,7 +928,7 @@ pub(crate) fn insert_crease_for_mention(
let crease_id = editor.update(cx, |editor, cx| {
let snapshot = editor.buffer().read(cx).snapshot(cx);
- let start = snapshot.anchor_in_excerpt(excerpt_id, anchor)?;
+ let start = snapshot.anchor_in_excerpt(anchor)?;
let start = start.bias_right(&snapshot);
let end = snapshot.anchor_before(start.to_offset(&snapshot) + content_len);
diff --git a/crates/agent_ui/src/message_editor.rs b/crates/agent_ui/src/message_editor.rs
index df36f38899c9abea165d0ff5a01834a2bb84c82f..0f59441ab27b5074a710c46a683e72d003a8d5d7 100644
--- a/crates/agent_ui/src/message_editor.rs
+++ b/crates/agent_ui/src/message_editor.rs
@@ -203,12 +203,10 @@ fn insert_mention_for_project_path(
MentionInsertPosition::AtCursor => editor.update(cx, |editor, cx| {
let buffer = editor.buffer().read(cx);
let snapshot = buffer.snapshot(cx);
- let (_, _, buffer_snapshot) = snapshot.as_singleton()?;
- let text_anchor = editor
- .selections
- .newest_anchor()
- .start
- .text_anchor
+ let buffer_snapshot = snapshot.as_singleton()?;
+ let text_anchor = snapshot
+ .anchor_to_buffer_anchor(editor.selections.newest_anchor().start)?
+ .0
.bias_left(&buffer_snapshot);
editor.insert(&mention_text, window, cx);
@@ -224,7 +222,7 @@ fn insert_mention_for_project_path(
editor.update(cx, |editor, cx| {
editor.edit(
[(
- multi_buffer::Anchor::max()..multi_buffer::Anchor::max(),
+ multi_buffer::Anchor::Max..multi_buffer::Anchor::Max,
new_text,
)],
cx,
@@ -263,7 +261,7 @@ async fn resolve_pasted_context_items(
) -> (Vec, Vec>) {
let mut items = Vec::new();
let mut added_worktrees = Vec::new();
- let default_image_name: SharedString = MentionUri::PastedImage.name().into();
+ let default_image_name: SharedString = "Image".into();
for entry in entries {
match entry {
@@ -603,7 +601,7 @@ impl MessageEditor {
COMMAND_HINT_INLAY_ID,
hint_pos,
&InlayHint {
- position: hint_pos.text_anchor,
+ position: snapshot.anchor_to_buffer_anchor(hint_pos)?.0,
label: InlayHintLabel::String(hint),
kind: Some(InlayHintKind::Parameter),
padding_left: false,
@@ -640,12 +638,11 @@ impl MessageEditor {
let start = self.editor.update(cx, |editor, cx| {
editor.set_text(content, window, cx);
- editor
- .buffer()
- .read(cx)
- .snapshot(cx)
- .anchor_before(Point::zero())
- .text_anchor
+ let snapshot = editor.buffer().read(cx).snapshot(cx);
+ snapshot
+ .anchor_to_buffer_anchor(snapshot.anchor_before(Point::zero()))
+ .unwrap()
+ .0
});
let supports_images = self.session_capabilities.read().supports_images();
@@ -815,7 +812,9 @@ impl MessageEditor {
)
.uri(match uri {
MentionUri::File { .. } => Some(uri.to_uri().to_string()),
- MentionUri::PastedImage => None,
+ MentionUri::PastedImage { .. } => {
+ Some(uri.to_uri().to_string())
+ }
other => {
debug_panic!(
"unexpected mention uri for image: {:?}",
@@ -999,13 +998,10 @@ impl MessageEditor {
if should_insert_creases && let Some(selections) = editor_clipboard_selections {
cx.stop_propagation();
- let insertion_target = self
- .editor
- .read(cx)
- .selections
- .newest_anchor()
- .start
- .text_anchor;
+ let snapshot = self.editor.read(cx).buffer().read(cx).snapshot(cx);
+ let (insertion_target, _) = snapshot
+ .anchor_to_buffer_anchor(self.editor.read(cx).selections.newest_anchor().start)
+ .unwrap();
let project = workspace.read(cx).project().clone();
for selection in selections {
@@ -1021,21 +1017,19 @@ impl MessageEditor {
};
let mention_text = mention_uri.as_link().to_string();
- let (excerpt_id, text_anchor, content_len) =
- self.editor.update(cx, |editor, cx| {
- let buffer = editor.buffer().read(cx);
- let snapshot = buffer.snapshot(cx);
- let (excerpt_id, _, buffer_snapshot) = snapshot.as_singleton().unwrap();
- let text_anchor = insertion_target.bias_left(&buffer_snapshot);
+ let (text_anchor, content_len) = self.editor.update(cx, |editor, cx| {
+ let buffer = editor.buffer().read(cx);
+ let snapshot = buffer.snapshot(cx);
+ let buffer_snapshot = snapshot.as_singleton().unwrap();
+ let text_anchor = insertion_target.bias_left(&buffer_snapshot);
- editor.insert(&mention_text, window, cx);
- editor.insert(" ", window, cx);
+ editor.insert(&mention_text, window, cx);
+ editor.insert(" ", window, cx);
- (excerpt_id, text_anchor, mention_text.len())
- });
+ (text_anchor, mention_text.len())
+ });
let Some((crease_id, tx)) = insert_crease_for_mention(
- excerpt_id,
text_anchor,
content_len,
crease_text.into(),
@@ -1145,8 +1139,7 @@ impl MessageEditor {
for (anchor, content_len, mention_uri) in all_mentions {
let Some((crease_id, tx)) = insert_crease_for_mention(
- anchor.excerpt_id,
- anchor.text_anchor,
+ snapshot.anchor_to_buffer_anchor(anchor).unwrap().0,
content_len,
mention_uri.name().into(),
mention_uri.icon_path(cx),
@@ -1339,25 +1332,23 @@ impl MessageEditor {
};
let mention_text = mention_uri.as_link().to_string();
- let (excerpt_id, text_anchor, content_len) = editor.update(cx, |editor, cx| {
+ let (text_anchor, content_len) = editor.update(cx, |editor, cx| {
let buffer = editor.buffer().read(cx);
let snapshot = buffer.snapshot(cx);
- let (excerpt_id, _, buffer_snapshot) = snapshot.as_singleton().unwrap();
- let text_anchor = editor
- .selections
- .newest_anchor()
- .start
- .text_anchor
+ let buffer_snapshot = snapshot.as_singleton().unwrap();
+ let text_anchor = snapshot
+ .anchor_to_buffer_anchor(editor.selections.newest_anchor().start)
+ .unwrap()
+ .0
.bias_left(&buffer_snapshot);
editor.insert(&mention_text, window, cx);
editor.insert(" ", window, cx);
- (excerpt_id, text_anchor, mention_text.len())
+ (text_anchor, mention_text.len())
});
let Some((crease_id, tx)) = insert_crease_for_mention(
- excerpt_id,
text_anchor,
content_len,
mention_uri.name().into(),
@@ -1649,7 +1640,9 @@ impl MessageEditor {
let mention_uri = if let Some(uri) = uri {
MentionUri::parse(&uri, path_style)
} else {
- Ok(MentionUri::PastedImage)
+ Ok(MentionUri::PastedImage {
+ name: "Image".to_string(),
+ })
};
let Some(mention_uri) = mention_uri.log_err() else {
continue;
@@ -1700,8 +1693,7 @@ impl MessageEditor {
let adjusted_start = insertion_start + range.start;
let anchor = snapshot.anchor_before(MultiBufferOffset(adjusted_start));
let Some((crease_id, tx)) = insert_crease_for_mention(
- anchor.excerpt_id,
- anchor.text_anchor,
+ snapshot.anchor_to_buffer_anchor(anchor).unwrap().0,
range.end - range.start,
mention_uri.name().into(),
mention_uri.icon_path(cx),
@@ -2077,23 +2069,13 @@ mod tests {
cx.run_until_parked();
- let excerpt_id = editor.update(cx, |editor, cx| {
- editor
- .buffer()
- .read(cx)
- .excerpt_ids()
- .into_iter()
- .next()
- .unwrap()
- });
let completions = editor.update_in(cx, |editor, window, cx| {
editor.set_text("Hello @file ", window, cx);
let buffer = editor.buffer().read(cx).as_singleton().unwrap();
let completion_provider = editor.completion_provider().unwrap();
completion_provider.completions(
- excerpt_id,
&buffer,
- text::Anchor::MAX,
+ text::Anchor::max_for_buffer(buffer.read(cx).remote_id()),
CompletionContext {
trigger_kind: CompletionTriggerKind::TRIGGER_CHARACTER,
trigger_character: Some("@".into()),
@@ -2114,7 +2096,7 @@ mod tests {
editor.update_in(cx, |editor, window, cx| {
let snapshot = editor.buffer().read(cx).snapshot(cx);
let range = snapshot
- .anchor_range_in_excerpt(excerpt_id, completion.replace_range)
+ .buffer_anchor_range_to_anchor_range(completion.replace_range)
.unwrap();
editor.edit([(range, completion.new_text)], cx);
(completion.confirm.unwrap())(CompletionIntent::Complete, window, cx);
@@ -4096,6 +4078,11 @@ mod tests {
&mut cx,
);
+ let image_name = temporary_image_path
+ .file_name()
+ .and_then(|n| n.to_str())
+ .unwrap_or("Image")
+ .to_string();
std::fs::remove_file(&temporary_image_path).expect("remove temp png");
let expected_file_uri = MentionUri::File {
@@ -4103,12 +4090,16 @@ mod tests {
}
.to_uri()
.to_string();
- let expected_image_uri = MentionUri::PastedImage.to_uri().to_string();
+ let expected_image_uri = MentionUri::PastedImage {
+ name: image_name.clone(),
+ }
+ .to_uri()
+ .to_string();
editor.update(&mut cx, |editor, cx| {
assert_eq!(
editor.text(cx),
- format!("[@Image]({expected_image_uri}) [@file.txt]({expected_file_uri}) ")
+ format!("[@{image_name}]({expected_image_uri}) [@file.txt]({expected_file_uri}) ")
);
});
@@ -4116,7 +4107,7 @@ mod tests {
assert_eq!(contents.len(), 2);
assert!(contents.iter().any(|(uri, mention)| {
- *uri == MentionUri::PastedImage && matches!(mention, Mention::Image(_))
+ matches!(uri, MentionUri::PastedImage { .. }) && matches!(mention, Mention::Image(_))
}));
assert!(contents.iter().any(|(uri, mention)| {
*uri == MentionUri::File {
diff --git a/crates/agent_ui/src/profile_selector.rs b/crates/agent_ui/src/profile_selector.rs
index 1bad3c45e4dece2397a2e026d659fd0fad043a24..963e32af55fda90f49edb0787f7327190c92681f 100644
--- a/crates/agent_ui/src/profile_selector.rs
+++ b/crates/agent_ui/src/profile_selector.rs
@@ -90,6 +90,7 @@ impl ProfileSelector {
if let Some((next_profile_id, _)) = profiles.get_index(next_index) {
self.provider.set_profile(next_profile_id.clone(), cx);
+ cx.notify();
}
}
diff --git a/crates/agent_ui/src/thread_import.rs b/crates/agent_ui/src/thread_import.rs
index f5fc89d3df4991ff5186e2af6d73ad6a840c09a1..5402b1c74353b73a522a068aa32dfd0a9dc85c60 100644
--- a/crates/agent_ui/src/thread_import.rs
+++ b/crates/agent_ui/src/thread_import.rs
@@ -17,7 +17,7 @@ use ui::{
prelude::*,
};
use util::ResultExt;
-use workspace::{ModalView, MultiWorkspace, Workspace};
+use workspace::{ModalView, MultiWorkspace, PathList, Workspace};
use crate::{
Agent, AgentPanel,
@@ -500,6 +500,7 @@ fn collect_importable_threads(
updated_at: session.updated_at.unwrap_or_else(|| Utc::now()),
created_at: session.created_at,
folder_paths,
+ main_worktree_paths: PathList::default(),
archived: true,
});
}
diff --git a/crates/agent_ui/src/thread_metadata_store.rs b/crates/agent_ui/src/thread_metadata_store.rs
index 4c66d57bcfafe98432319a173e7736a581f1d986..fcd9665c52451d62fe8185abca919148a1666126 100644
--- a/crates/agent_ui/src/thread_metadata_store.rs
+++ b/crates/agent_ui/src/thread_metadata_store.rs
@@ -66,6 +66,7 @@ fn migrate_thread_metadata(cx: &mut App) {
updated_at: entry.updated_at,
created_at: entry.created_at,
folder_paths: entry.folder_paths,
+ main_worktree_paths: PathList::default(),
archived: true,
})
})
@@ -126,6 +127,7 @@ pub struct ThreadMetadata {
pub updated_at: DateTime,
pub created_at: Option>,
pub folder_paths: PathList,
+ pub main_worktree_paths: PathList,
pub archived: bool,
}
@@ -149,6 +151,7 @@ pub struct ThreadMetadataStore {
db: ThreadMetadataDb,
threads: HashMap,
threads_by_paths: HashMap>,
+ threads_by_main_paths: HashMap>,
reload_task: Option>>,
session_subscriptions: HashMap,
pending_thread_ops_tx: smol::channel::Sender,
@@ -238,6 +241,21 @@ impl ThreadMetadataStore {
.filter(|s| !s.archived)
}
+ /// Returns threads whose `main_worktree_paths` matches the given path list,
+ /// excluding archived threads. This finds threads that were opened in a
+ /// linked worktree but are associated with the given main worktree.
+ pub fn entries_for_main_worktree_path(
+ &self,
+ path_list: &PathList,
+ ) -> impl Iterator- + '_ {
+ self.threads_by_main_paths
+ .get(path_list)
+ .into_iter()
+ .flatten()
+ .filter_map(|s| self.threads.get(s))
+ .filter(|s| !s.archived)
+ }
+
fn reload(&mut self, cx: &mut Context) -> Shared> {
let db = self.db.clone();
self.reload_task.take();
@@ -254,12 +272,19 @@ impl ThreadMetadataStore {
this.update(cx, |this, cx| {
this.threads.clear();
this.threads_by_paths.clear();
+ this.threads_by_main_paths.clear();
for row in rows {
this.threads_by_paths
.entry(row.folder_paths.clone())
.or_default()
.insert(row.session_id.clone());
+ if !row.main_worktree_paths.is_empty() {
+ this.threads_by_main_paths
+ .entry(row.main_worktree_paths.clone())
+ .or_default()
+ .insert(row.session_id.clone());
+ }
this.threads.insert(row.session_id.clone(), row);
}
@@ -298,12 +323,22 @@ impl ThreadMetadataStore {
}
fn save_internal(&mut self, metadata: ThreadMetadata) {
- // If the folder paths have changed, we need to clear the old entry
- if let Some(thread) = self.threads.get(&metadata.session_id)
- && thread.folder_paths != metadata.folder_paths
- && let Some(session_ids) = self.threads_by_paths.get_mut(&thread.folder_paths)
- {
- session_ids.remove(&metadata.session_id);
+ if let Some(thread) = self.threads.get(&metadata.session_id) {
+ if thread.folder_paths != metadata.folder_paths {
+ if let Some(session_ids) = self.threads_by_paths.get_mut(&thread.folder_paths) {
+ session_ids.remove(&metadata.session_id);
+ }
+ }
+ if thread.main_worktree_paths != metadata.main_worktree_paths
+ && !thread.main_worktree_paths.is_empty()
+ {
+ if let Some(session_ids) = self
+ .threads_by_main_paths
+ .get_mut(&thread.main_worktree_paths)
+ {
+ session_ids.remove(&metadata.session_id);
+ }
+ }
}
self.threads
@@ -314,6 +349,13 @@ impl ThreadMetadataStore {
.or_default()
.insert(metadata.session_id.clone());
+ if !metadata.main_worktree_paths.is_empty() {
+ self.threads_by_main_paths
+ .entry(metadata.main_worktree_paths.clone())
+ .or_default()
+ .insert(metadata.session_id.clone());
+ }
+
self.pending_thread_ops_tx
.try_send(DbOperation::Upsert(metadata))
.log_err();
@@ -370,10 +412,18 @@ impl ThreadMetadataStore {
return;
}
- if let Some(thread) = self.threads.get(&session_id)
- && let Some(session_ids) = self.threads_by_paths.get_mut(&thread.folder_paths)
- {
- session_ids.remove(&session_id);
+ if let Some(thread) = self.threads.get(&session_id) {
+ if let Some(session_ids) = self.threads_by_paths.get_mut(&thread.folder_paths) {
+ session_ids.remove(&session_id);
+ }
+ if !thread.main_worktree_paths.is_empty() {
+ if let Some(session_ids) = self
+ .threads_by_main_paths
+ .get_mut(&thread.main_worktree_paths)
+ {
+ session_ids.remove(&session_id);
+ }
+ }
}
self.threads.remove(&session_id);
self.pending_thread_ops_tx
@@ -397,14 +447,9 @@ impl ThreadMetadataStore {
let weak_store = weak_store.clone();
move |thread, cx| {
weak_store
- .update(cx, |store, cx| {
+ .update(cx, |store, _cx| {
let session_id = thread.session_id().clone();
store.session_subscriptions.remove(&session_id);
- if thread.entries().is_empty() {
- // Empty threads can be unloaded without ever being
- // durably persisted by the underlying agent.
- store.delete(session_id, cx);
- }
})
.ok();
}
@@ -449,6 +494,7 @@ impl ThreadMetadataStore {
db,
threads: HashMap::default(),
threads_by_paths: HashMap::default(),
+ threads_by_main_paths: HashMap::default(),
reload_task: None,
session_subscriptions: HashMap::default(),
pending_thread_ops_tx: tx,
@@ -494,6 +540,10 @@ impl ThreadMetadataStore {
| AcpThreadEvent::Refusal
| AcpThreadEvent::WorkingDirectoriesUpdated => {
let thread_ref = thread.read(cx);
+ if thread_ref.entries().is_empty() {
+ return;
+ }
+
let existing_thread = self.threads.get(thread_ref.session_id());
let session_id = thread_ref.session_id().clone();
let title = thread_ref
@@ -517,6 +567,20 @@ impl ThreadMetadataStore {
PathList::new(&paths)
};
+ let main_worktree_paths = {
+ let project = thread_ref.project().read(cx);
+ let mut main_paths: Vec> = Vec::new();
+ for repo in project.repositories(cx).values() {
+ let snapshot = repo.read(cx).snapshot();
+ if snapshot.is_linked_worktree() {
+ main_paths.push(snapshot.original_repo_abs_path.clone());
+ }
+ }
+ main_paths.sort();
+ main_paths.dedup();
+ PathList::new(&main_paths)
+ };
+
// Threads without a folder path (e.g. started in an empty
// window) are archived by default so they don't get lost,
// because they won't show up in the sidebar. Users can reload
@@ -532,6 +596,7 @@ impl ThreadMetadataStore {
created_at: Some(created_at),
updated_at,
folder_paths,
+ main_worktree_paths,
archived,
};
@@ -567,6 +632,8 @@ impl Domain for ThreadMetadataDb {
) STRICT;
),
sql!(ALTER TABLE sidebar_threads ADD COLUMN archived INTEGER DEFAULT 0),
+ sql!(ALTER TABLE sidebar_threads ADD COLUMN main_worktree_paths TEXT),
+ sql!(ALTER TABLE sidebar_threads ADD COLUMN main_worktree_paths_order TEXT),
];
}
@@ -583,7 +650,7 @@ impl ThreadMetadataDb {
/// List all sidebar thread metadata, ordered by updated_at descending.
pub fn list(&self) -> anyhow::Result> {
self.select::(
- "SELECT session_id, agent_id, title, updated_at, created_at, folder_paths, folder_paths_order, archived \
+ "SELECT session_id, agent_id, title, updated_at, created_at, folder_paths, folder_paths_order, archived, main_worktree_paths, main_worktree_paths_order \
FROM sidebar_threads \
ORDER BY updated_at DESC"
)?()
@@ -606,11 +673,18 @@ impl ThreadMetadataDb {
} else {
(Some(serialized.paths), Some(serialized.order))
};
+ let main_serialized = row.main_worktree_paths.serialize();
+ let (main_worktree_paths, main_worktree_paths_order) = if row.main_worktree_paths.is_empty()
+ {
+ (None, None)
+ } else {
+ (Some(main_serialized.paths), Some(main_serialized.order))
+ };
let archived = row.archived;
self.write(move |conn| {
- let sql = "INSERT INTO sidebar_threads(session_id, agent_id, title, updated_at, created_at, folder_paths, folder_paths_order, archived) \
- VALUES (?1, ?2, ?3, ?4, ?5, ?6, ?7, ?8) \
+ let sql = "INSERT INTO sidebar_threads(session_id, agent_id, title, updated_at, created_at, folder_paths, folder_paths_order, archived, main_worktree_paths, main_worktree_paths_order) \
+ VALUES (?1, ?2, ?3, ?4, ?5, ?6, ?7, ?8, ?9, ?10) \
ON CONFLICT(session_id) DO UPDATE SET \
agent_id = excluded.agent_id, \
title = excluded.title, \
@@ -618,7 +692,9 @@ impl ThreadMetadataDb {
created_at = excluded.created_at, \
folder_paths = excluded.folder_paths, \
folder_paths_order = excluded.folder_paths_order, \
- archived = excluded.archived";
+ archived = excluded.archived, \
+ main_worktree_paths = excluded.main_worktree_paths, \
+ main_worktree_paths_order = excluded.main_worktree_paths_order";
let mut stmt = Statement::prepare(conn, sql)?;
let mut i = stmt.bind(&id, 1)?;
i = stmt.bind(&agent_id, i)?;
@@ -627,7 +703,9 @@ impl ThreadMetadataDb {
i = stmt.bind(&created_at, i)?;
i = stmt.bind(&folder_paths, i)?;
i = stmt.bind(&folder_paths_order, i)?;
- stmt.bind(&archived, i)?;
+ i = stmt.bind(&archived, i)?;
+ i = stmt.bind(&main_worktree_paths, i)?;
+ stmt.bind(&main_worktree_paths_order, i)?;
stmt.exec()
})
.await
@@ -657,6 +735,10 @@ impl Column for ThreadMetadata {
let (folder_paths_order_str, next): (Option, i32) =
Column::column(statement, next)?;
let (archived, next): (bool, i32) = Column::column(statement, next)?;
+ let (main_worktree_paths_str, next): (Option, i32) =
+ Column::column(statement, next)?;
+ let (main_worktree_paths_order_str, next): (Option, i32) =
+ Column::column(statement, next)?;
let agent_id = agent_id
.map(|id| AgentId::new(id))
@@ -678,6 +760,15 @@ impl Column for ThreadMetadata {
})
.unwrap_or_default();
+ let main_worktree_paths = main_worktree_paths_str
+ .map(|paths| {
+ PathList::deserialize(&util::path_list::SerializedPathList {
+ paths,
+ order: main_worktree_paths_order_str.unwrap_or_default(),
+ })
+ })
+ .unwrap_or_default();
+
Ok((
ThreadMetadata {
session_id: acp::SessionId::new(id),
@@ -686,6 +777,7 @@ impl Column for ThreadMetadata {
updated_at,
created_at,
folder_paths,
+ main_worktree_paths,
archived,
},
next,
@@ -742,6 +834,7 @@ mod tests {
updated_at,
created_at: Some(updated_at),
folder_paths,
+ main_worktree_paths: PathList::default(),
}
}
@@ -957,6 +1050,7 @@ mod tests {
updated_at: now - chrono::Duration::seconds(10),
created_at: Some(now - chrono::Duration::seconds(10)),
folder_paths: project_a_paths.clone(),
+ main_worktree_paths: PathList::default(),
archived: false,
};
@@ -1066,6 +1160,7 @@ mod tests {
updated_at: existing_updated_at,
created_at: Some(existing_updated_at),
folder_paths: project_paths.clone(),
+ main_worktree_paths: PathList::default(),
archived: false,
};
@@ -1197,7 +1292,7 @@ mod tests {
}
#[gpui::test]
- async fn test_empty_thread_metadata_deleted_when_thread_released(cx: &mut TestAppContext) {
+ async fn test_empty_thread_events_do_not_create_metadata(cx: &mut TestAppContext) {
init_test(cx);
let fs = FakeFs::new(cx.executor());
@@ -1227,11 +1322,16 @@ mod tests {
.entry_ids()
.collect::>()
});
- assert_eq!(metadata_ids, vec![session_id]);
+ assert!(
+ metadata_ids.is_empty(),
+ "expected empty draft thread title updates to be ignored"
+ );
- drop(thread);
- cx.update(|_| {});
- cx.run_until_parked();
+ cx.update(|cx| {
+ thread.update(cx, |thread, cx| {
+ thread.push_user_content_block(None, "Hello".into(), cx);
+ });
+ });
cx.run_until_parked();
let metadata_ids = cx.update(|cx| {
@@ -1240,10 +1340,7 @@ mod tests {
.entry_ids()
.collect::>()
});
- assert!(
- metadata_ids.is_empty(),
- "expected empty draft thread metadata to be deleted on release"
- );
+ assert_eq!(metadata_ids, vec![session_id]);
}
#[gpui::test]
@@ -1318,6 +1415,7 @@ mod tests {
cx.update(|cx| {
thread_without_worktree.update(cx, |thread, cx| {
+ thread.push_user_content_block(None, "content".into(), cx);
thread.set_title("No Project Thread".into(), cx).detach();
});
});
@@ -1338,6 +1436,7 @@ mod tests {
cx.update(|cx| {
thread_with_worktree.update(cx, |thread, cx| {
+ thread.push_user_content_block(None, "content".into(), cx);
thread.set_title("Project Thread".into(), cx).detach();
});
});
@@ -1393,6 +1492,7 @@ mod tests {
// Set a title on the regular thread to trigger a save via handle_thread_update.
cx.update(|cx| {
regular_thread.update(cx, |thread, cx| {
+ thread.push_user_content_block(None, "content".into(), cx);
thread.set_title("Regular Thread".into(), cx).detach();
});
});
diff --git a/crates/agent_ui/src/threads_archive_view.rs b/crates/agent_ui/src/threads_archive_view.rs
index 9aca31e1edbe729fccecfc0dd8f0530d2aed2564..13b2aa1a37cd506c338d13db78bce751882e426a 100644
--- a/crates/agent_ui/src/threads_archive_view.rs
+++ b/crates/agent_ui/src/threads_archive_view.rs
@@ -91,14 +91,16 @@ impl TimeBucket {
}
fn fuzzy_match_positions(query: &str, text: &str) -> Option> {
- let query = query.to_lowercase();
- let text_lower = text.to_lowercase();
let mut positions = Vec::new();
let mut query_chars = query.chars().peekable();
- for (i, c) in text_lower.chars().enumerate() {
- if query_chars.peek() == Some(&c) {
- positions.push(i);
- query_chars.next();
+ for (byte_idx, candidate_char) in text.char_indices() {
+ if let Some(&query_char) = query_chars.peek() {
+ if candidate_char.eq_ignore_ascii_case(&query_char) {
+ positions.push(byte_idx);
+ query_chars.next();
+ }
+ } else {
+ break;
}
}
if query_chars.peek().is_none() {
@@ -216,6 +218,13 @@ impl ThreadsArchiveView {
handle.focus(window, cx);
}
+ pub fn is_filter_editor_focused(&self, window: &Window, cx: &App) -> bool {
+ self.filter_editor
+ .read(cx)
+ .focus_handle(cx)
+ .is_focused(window)
+ }
+
fn update_items(&mut self, cx: &mut Context) {
let sessions = ThreadMetadataStore::global(cx)
.read(cx)
@@ -344,7 +353,6 @@ impl ThreadsArchiveView {
.map(|mw| {
mw.read(cx)
.workspaces()
- .iter()
.filter_map(|ws| ws.read(cx).database_id())
.collect()
})
@@ -1283,3 +1291,59 @@ impl PickerDelegate for ProjectPickerDelegate {
)
}
}
+
+#[cfg(test)]
+mod tests {
+ use super::*;
+
+ #[test]
+ fn test_fuzzy_match_positions_returns_byte_indices() {
+ // "🔥abc" — the fire emoji is 4 bytes, so 'a' starts at byte 4, 'b' at 5, 'c' at 6.
+ let text = "🔥abc";
+ let positions = fuzzy_match_positions("ab", text).expect("should match");
+ assert_eq!(positions, vec![4, 5]);
+
+ // Verify positions are valid char boundaries (this is the assertion that
+ // panicked before the fix).
+ for &pos in &positions {
+ assert!(
+ text.is_char_boundary(pos),
+ "position {pos} is not a valid UTF-8 boundary in {text:?}"
+ );
+ }
+ }
+
+ #[test]
+ fn test_fuzzy_match_positions_ascii_still_works() {
+ let positions = fuzzy_match_positions("he", "hello").expect("should match");
+ assert_eq!(positions, vec![0, 1]);
+ }
+
+ #[test]
+ fn test_fuzzy_match_positions_case_insensitive() {
+ let positions = fuzzy_match_positions("HE", "hello").expect("should match");
+ assert_eq!(positions, vec![0, 1]);
+ }
+
+ #[test]
+ fn test_fuzzy_match_positions_no_match() {
+ assert!(fuzzy_match_positions("xyz", "hello").is_none());
+ }
+
+ #[test]
+ fn test_fuzzy_match_positions_multi_byte_interior() {
+ // "café" — 'é' is 2 bytes (0xC3 0xA9), so 'f' starts at byte 2, 'é' at byte 3.
+ let text = "café";
+ let positions = fuzzy_match_positions("fé", text).expect("should match");
+ // Byte layout: 'c'=1 byte, 'a'=1 byte, 'f'=1 byte, 'é'=2 bytes.
+ // So byte positions are c=0, a=1, f=2, é=3 (é spans bytes 3..=4).
+ // Matching "fé" therefore yields positions [2, 3].
+ assert_eq!(positions, vec![2, 3]);
+ for &pos in &positions {
+ assert!(
+ text.is_char_boundary(pos),
+ "position {pos} is not a valid UTF-8 boundary in {text:?}"
+ );
+ }
+ }
+}
diff --git a/crates/agent_ui/src/ui/mention_crease.rs b/crates/agent_ui/src/ui/mention_crease.rs
index 6e99647304d93fe91cd6b91dbd2bf3bfd82c7ab0..bd48a558f5d9b1f042f974dc6e174f8ba8078adf 100644
--- a/crates/agent_ui/src/ui/mention_crease.rs
+++ b/crates/agent_ui/src/ui/mention_crease.rs
@@ -184,7 +184,7 @@ fn open_mention_uri(
MentionUri::Fetch { url } => {
cx.open_url(url.as_str());
}
- MentionUri::PastedImage
+ MentionUri::PastedImage { .. }
| MentionUri::Selection { abs_path: None, .. }
| MentionUri::Diagnostics { .. }
| MentionUri::TerminalSelection { .. }
diff --git a/crates/buffer_diff/src/buffer_diff.rs b/crates/buffer_diff/src/buffer_diff.rs
index 1cb1e801c2cd68d442321da76c0abb848f9fa0d8..c168bd2956e0687eca5e5adeb16edbe70e9edd54 100644
--- a/crates/buffer_diff/src/buffer_diff.rs
+++ b/crates/buffer_diff/src/buffer_diff.rs
@@ -171,9 +171,9 @@ impl sum_tree::Item for PendingHunk {
impl sum_tree::Summary for DiffHunkSummary {
type Context<'a> = &'a text::BufferSnapshot;
- fn zero(_cx: Self::Context<'_>) -> Self {
+ fn zero(buffer: &text::BufferSnapshot) -> Self {
DiffHunkSummary {
- buffer_range: Anchor::MIN..Anchor::MIN,
+ buffer_range: Anchor::min_min_range_for_buffer(buffer.remote_id()),
diff_base_byte_range: 0..0,
added_rows: 0,
removed_rows: 0,
@@ -248,6 +248,10 @@ impl BufferDiffSnapshot {
buffer_diff.update(cx, |buffer_diff, cx| buffer_diff.snapshot(cx))
}
+ pub fn buffer_id(&self) -> BufferId {
+ self.inner.buffer_snapshot.remote_id()
+ }
+
pub fn is_empty(&self) -> bool {
self.inner.hunks.is_empty()
}
@@ -953,7 +957,7 @@ impl BufferDiffInner {
.flat_map(move |hunk| {
[
(
- &hunk.buffer_range.start,
+ hunk.buffer_range.start,
(
hunk.buffer_range.start,
hunk.diff_base_byte_range.start,
@@ -961,7 +965,7 @@ impl BufferDiffInner {
),
),
(
- &hunk.buffer_range.end,
+ hunk.buffer_range.end,
(hunk.buffer_range.end, hunk.diff_base_byte_range.end, hunk),
),
]
@@ -1653,7 +1657,7 @@ impl BufferDiff {
) {
let hunks = self
.snapshot(cx)
- .hunks_intersecting_range(Anchor::MIN..Anchor::MAX, buffer)
+ .hunks_intersecting_range(Anchor::min_max_range_for_buffer(buffer.remote_id()), buffer)
.collect::>();
let Some(secondary) = self.secondary_diff.clone() else {
return;
diff --git a/crates/call/src/call_impl/room.rs b/crates/call/src/call_impl/room.rs
index f92a8163d54de0c21c7318c4baab5aad5ce49b75..37a3fd823ec03d3b1d94419ac47662431d718708 100644
--- a/crates/call/src/call_impl/room.rs
+++ b/crates/call/src/call_impl/room.rs
@@ -21,7 +21,7 @@ use language::LanguageRegistry;
use livekit::{LocalTrackPublication, ParticipantIdentity, RoomEvent};
use livekit_client::{self as livekit, AudioStream, TrackSid};
use postage::{sink::Sink, stream::Stream, watch};
-use project::Project;
+use project::{CURRENT_PROJECT_FEATURES, Project};
use settings::Settings as _;
use std::sync::atomic::AtomicU64;
use std::{future::Future, mem, rc::Rc, sync::Arc, time::Duration, time::Instant};
@@ -1237,6 +1237,10 @@ impl Room {
worktrees: project.read(cx).worktree_metadata_protos(cx),
is_ssh_project: project.read(cx).is_via_remote_server(),
windows_paths: Some(project.read(cx).path_style(cx) == PathStyle::Windows),
+ features: CURRENT_PROJECT_FEATURES
+ .iter()
+ .map(|s| s.to_string())
+ .collect(),
});
cx.spawn(async move |this, cx| {
diff --git a/crates/cli/src/cli.rs b/crates/cli/src/cli.rs
index 1a3ce059b8116ac7438f3eb0330b47660cc863de..d8da78c53210230597dab49ce297d9fa694e62f1 100644
--- a/crates/cli/src/cli.rs
+++ b/crates/cli/src/cli.rs
@@ -21,6 +21,7 @@ pub enum CliRequest {
reuse: bool,
env: Option>,
user_data_dir: Option,
+ dev_container: bool,
},
}
diff --git a/crates/cli/src/main.rs b/crates/cli/src/main.rs
index b8af5896285d3080ca3320a5909b3f58f72de643..41f2d14c1908ac18e7ea297eef19d8d9bd1cf8b5 100644
--- a/crates/cli/src/main.rs
+++ b/crates/cli/src/main.rs
@@ -118,6 +118,12 @@ struct Args {
/// Will attempt to give the correct command to run
#[arg(long)]
system_specs: bool,
+ /// Open the project in a dev container.
+ ///
+ /// Automatically triggers "Reopen in Dev Container" if a `.devcontainer/`
+ /// configuration is found in the project directory.
+ #[arg(long)]
+ dev_container: bool,
/// Pairs of file paths to diff. Can be specified multiple times.
/// When directories are provided, recurses into them and shows all changed files in a single multi-diff view.
#[arg(long, action = clap::ArgAction::Append, num_args = 2, value_names = ["OLD_PATH", "NEW_PATH"])]
@@ -670,6 +676,7 @@ fn main() -> Result<()> {
reuse: args.reuse,
env,
user_data_dir: user_data_dir_for_thread,
+ dev_container: args.dev_container,
})?;
while let Ok(response) = rx.recv() {
diff --git a/crates/client/Cargo.toml b/crates/client/Cargo.toml
index 1edbb3399e4332e2ebd23f812c66697bda72d587..7bbaccb22e0e6c7508240186103e216f83be2f0c 100644
--- a/crates/client/Cargo.toml
+++ b/crates/client/Cargo.toml
@@ -22,6 +22,7 @@ base64.workspace = true
chrono = { workspace = true, features = ["serde"] }
clock.workspace = true
cloud_api_client.workspace = true
+cloud_api_types.workspace = true
cloud_llm_client.workspace = true
collections.workspace = true
credentials_provider.workspace = true
@@ -35,6 +36,7 @@ gpui_tokio.workspace = true
http_client.workspace = true
http_client_tls.workspace = true
httparse = "1.10"
+language_model.workspace = true
log.workspace = true
parking_lot.workspace = true
paths.workspace = true
@@ -60,6 +62,7 @@ tokio.workspace = true
url.workspace = true
util.workspace = true
worktree.workspace = true
+zed_credentials_provider.workspace = true
[dev-dependencies]
clock = { workspace = true, features = ["test-support"] }
diff --git a/crates/client/src/client.rs b/crates/client/src/client.rs
index f40d90a983978e8928477b5a2973dfa05e05b907..dfd9963a0ee52d167f8d4edb0b850f4debed7fd4 100644
--- a/crates/client/src/client.rs
+++ b/crates/client/src/client.rs
@@ -1,6 +1,7 @@
#[cfg(any(test, feature = "test-support"))]
pub mod test;
+mod llm_token;
mod proxy;
pub mod telemetry;
pub mod user;
@@ -13,8 +14,9 @@ use async_tungstenite::tungstenite::{
http::{HeaderValue, Request, StatusCode},
};
use clock::SystemClock;
-use cloud_api_client::CloudApiClient;
use cloud_api_client::websocket_protocol::MessageToClient;
+use cloud_api_client::{ClientApiError, CloudApiClient};
+use cloud_api_types::OrganizationId;
use credentials_provider::CredentialsProvider;
use feature_flags::FeatureFlagAppExt as _;
use futures::{
@@ -24,6 +26,7 @@ use futures::{
};
use gpui::{App, AsyncApp, Entity, Global, Task, WeakEntity, actions};
use http_client::{HttpClient, HttpClientWithUrl, http, read_proxy_from_env};
+use language_model::LlmApiToken;
use parking_lot::{Mutex, RwLock};
use postage::watch;
use proxy::connect_proxy_stream;
@@ -51,6 +54,7 @@ use tokio::net::TcpStream;
use url::Url;
use util::{ConnectionResult, ResultExt};
+pub use llm_token::*;
pub use rpc::*;
pub use telemetry_events::Event;
pub use user::*;
@@ -339,7 +343,7 @@ pub struct ClientCredentialsProvider {
impl ClientCredentialsProvider {
pub fn new(cx: &App) -> Self {
Self {
- provider: ::global(cx),
+ provider: zed_credentials_provider::global(cx),
}
}
@@ -568,6 +572,10 @@ impl Client {
self.http.clone()
}
+ pub fn credentials_provider(&self) -> Arc {
+ self.credentials_provider.provider.clone()
+ }
+
pub fn cloud_client(&self) -> Arc {
self.cloud_client.clone()
}
@@ -1513,6 +1521,66 @@ impl Client {
})
}
+ pub async fn acquire_llm_token(
+ &self,
+ llm_token: &LlmApiToken,
+ organization_id: Option,
+ ) -> Result {
+ let system_id = self.telemetry().system_id().map(|x| x.to_string());
+ let cloud_client = self.cloud_client();
+ match llm_token
+ .acquire(&cloud_client, system_id, organization_id)
+ .await
+ {
+ Ok(token) => Ok(token),
+ Err(ClientApiError::Unauthorized) => {
+ self.request_sign_out();
+ Err(ClientApiError::Unauthorized).context("Failed to create LLM token")
+ }
+ Err(err) => Err(anyhow::Error::from(err)),
+ }
+ }
+
+ pub async fn refresh_llm_token(
+ &self,
+ llm_token: &LlmApiToken,
+ organization_id: Option,
+ ) -> Result {
+ let system_id = self.telemetry().system_id().map(|x| x.to_string());
+ let cloud_client = self.cloud_client();
+ match llm_token
+ .refresh(&cloud_client, system_id, organization_id)
+ .await
+ {
+ Ok(token) => Ok(token),
+ Err(ClientApiError::Unauthorized) => {
+ self.request_sign_out();
+ return Err(ClientApiError::Unauthorized).context("Failed to create LLM token");
+ }
+ Err(err) => return Err(anyhow::Error::from(err)),
+ }
+ }
+
+ pub async fn clear_and_refresh_llm_token(
+ &self,
+ llm_token: &LlmApiToken,
+ organization_id: Option,
+ ) -> Result {
+ let system_id = self.telemetry().system_id().map(|x| x.to_string());
+ let cloud_client = self.cloud_client();
+ match llm_token
+ .clear_and_refresh(&cloud_client, system_id, organization_id)
+ .await
+ {
+ Ok(token) => Ok(token),
+ Err(ClientApiError::Unauthorized) => {
+ self.request_sign_out();
+ return Err(ClientApiError::Unauthorized).context("Failed to create LLM token");
+ }
+ Err(err) => return Err(anyhow::Error::from(err)),
+ }
+ }
+
pub async fn sign_out(self: &Arc, cx: &AsyncApp) {
self.state.write().credentials = None;
self.cloud_client.clear_credentials();
@@ -2141,11 +2209,13 @@ mod tests {
project_id: 1,
committer_name: None,
committer_email: None,
+ features: Vec::new(),
});
server.send(proto::JoinProject {
project_id: 2,
committer_name: None,
committer_email: None,
+ features: Vec::new(),
});
done_rx1.recv().await.unwrap();
done_rx2.recv().await.unwrap();
diff --git a/crates/client/src/llm_token.rs b/crates/client/src/llm_token.rs
new file mode 100644
index 0000000000000000000000000000000000000000..f62aa6dd4dc3462bc3a0f6f46c35f0e4e5499816
--- /dev/null
+++ b/crates/client/src/llm_token.rs
@@ -0,0 +1,116 @@
+use super::{Client, UserStore};
+use cloud_api_types::websocket_protocol::MessageToClient;
+use cloud_llm_client::{EXPIRED_LLM_TOKEN_HEADER_NAME, OUTDATED_LLM_TOKEN_HEADER_NAME};
+use gpui::{
+ App, AppContext as _, Context, Entity, EventEmitter, Global, ReadGlobal as _, Subscription,
+};
+use language_model::LlmApiToken;
+use std::sync::Arc;
+
+pub trait NeedsLlmTokenRefresh {
+ /// Returns whether the LLM token needs to be refreshed.
+ fn needs_llm_token_refresh(&self) -> bool;
+}
+
+impl NeedsLlmTokenRefresh for http_client::Response {
+ fn needs_llm_token_refresh(&self) -> bool {
+ self.headers().get(EXPIRED_LLM_TOKEN_HEADER_NAME).is_some()
+ || self.headers().get(OUTDATED_LLM_TOKEN_HEADER_NAME).is_some()
+ }
+}
+
+enum TokenRefreshMode {
+ Refresh,
+ ClearAndRefresh,
+}
+
+pub fn global_llm_token(cx: &App) -> LlmApiToken {
+ RefreshLlmTokenListener::global(cx)
+ .read(cx)
+ .llm_api_token
+ .clone()
+}
+
+struct GlobalRefreshLlmTokenListener(Entity);
+
+impl Global for GlobalRefreshLlmTokenListener {}
+
+pub struct LlmTokenRefreshedEvent;
+
+pub struct RefreshLlmTokenListener {
+ client: Arc,
+ user_store: Entity,
+ llm_api_token: LlmApiToken,
+ _subscription: Subscription,
+}
+
+impl EventEmitter for RefreshLlmTokenListener {}
+
+impl RefreshLlmTokenListener {
+ pub fn register(client: Arc, user_store: Entity, cx: &mut App) {
+ let listener = cx.new(|cx| RefreshLlmTokenListener::new(client, user_store, cx));
+ cx.set_global(GlobalRefreshLlmTokenListener(listener));
+ }
+
+ pub fn global(cx: &App) -> Entity {
+ GlobalRefreshLlmTokenListener::global(cx).0.clone()
+ }
+
+ fn new(client: Arc, user_store: Entity, cx: &mut Context) -> Self {
+ client.add_message_to_client_handler({
+ let this = cx.weak_entity();
+ move |message, cx| {
+ if let Some(this) = this.upgrade() {
+ Self::handle_refresh_llm_token(this, message, cx);
+ }
+ }
+ });
+
+ let subscription = cx.subscribe(&user_store, |this, _user_store, event, cx| {
+ if matches!(event, super::user::Event::OrganizationChanged) {
+ this.refresh(TokenRefreshMode::ClearAndRefresh, cx);
+ }
+ });
+
+ Self {
+ client,
+ user_store,
+ llm_api_token: LlmApiToken::default(),
+ _subscription: subscription,
+ }
+ }
+
+ fn refresh(&self, mode: TokenRefreshMode, cx: &mut Context) {
+ let client = self.client.clone();
+ let llm_api_token = self.llm_api_token.clone();
+ let organization_id = self
+ .user_store
+ .read(cx)
+ .current_organization()
+ .map(|organization| organization.id.clone());
+ cx.spawn(async move |this, cx| {
+ match mode {
+ TokenRefreshMode::Refresh => {
+ client
+ .refresh_llm_token(&llm_api_token, organization_id)
+ .await?;
+ }
+ TokenRefreshMode::ClearAndRefresh => {
+ client
+ .clear_and_refresh_llm_token(&llm_api_token, organization_id)
+ .await?;
+ }
+ }
+ this.update(cx, |_this, cx| cx.emit(LlmTokenRefreshedEvent))
+ })
+ .detach_and_log_err(cx);
+ }
+
+ fn handle_refresh_llm_token(this: Entity, message: &MessageToClient, cx: &mut App) {
+ match message {
+ MessageToClient::UserUpdated => {
+ this.update(cx, |this, cx| this.refresh(TokenRefreshMode::Refresh, cx));
+ }
+ }
+ }
+}
diff --git a/crates/codestral/Cargo.toml b/crates/codestral/Cargo.toml
index 0daaee8fb1420c76757ca898655e8dd1a5244d7e..801221d3128b8aa2d25175e086a741d5d85da626 100644
--- a/crates/codestral/Cargo.toml
+++ b/crates/codestral/Cargo.toml
@@ -22,6 +22,7 @@ log.workspace = true
serde.workspace = true
serde_json.workspace = true
text.workspace = true
+zed_credentials_provider.workspace = true
zeta_prompt.workspace = true
[dev-dependencies]
diff --git a/crates/codestral/src/codestral.rs b/crates/codestral/src/codestral.rs
index 3930e2e873a91618bfae456bc188bbd90ffa64b9..7685fa8f5b1eae9e98a621484602e199c2b76f96 100644
--- a/crates/codestral/src/codestral.rs
+++ b/crates/codestral/src/codestral.rs
@@ -48,9 +48,10 @@ pub fn codestral_api_key(cx: &App) -> Option> {
}
pub fn load_codestral_api_key(cx: &mut App) -> Task> {
+ let credentials_provider = zed_credentials_provider::global(cx);
let api_url = codestral_api_url(cx);
codestral_api_key_state(cx).update(cx, |key_state, cx| {
- key_state.load_if_needed(api_url, |s| s, cx)
+ key_state.load_if_needed(api_url, |s| s, credentials_provider, cx)
})
}
diff --git a/crates/collab/migrations.sqlite/20221109000000_test_schema.sql b/crates/collab/migrations.sqlite/20221109000000_test_schema.sql
index 75d7dbf194068f78b3d566e54bb0fa18f66a9878..75175372f24a83cfb50e8f87deae93e3f03e1a8a 100644
--- a/crates/collab/migrations.sqlite/20221109000000_test_schema.sql
+++ b/crates/collab/migrations.sqlite/20221109000000_test_schema.sql
@@ -48,7 +48,8 @@ CREATE TABLE "projects" (
"host_connection_id" INTEGER,
"host_connection_server_id" INTEGER REFERENCES servers (id) ON DELETE CASCADE,
"unregistered" BOOLEAN NOT NULL DEFAULT FALSE,
- "windows_paths" BOOLEAN NOT NULL DEFAULT FALSE
+ "windows_paths" BOOLEAN NOT NULL DEFAULT FALSE,
+ "features" TEXT NOT NULL DEFAULT ''
);
CREATE INDEX "index_projects_on_host_connection_server_id" ON "projects" ("host_connection_server_id");
@@ -64,6 +65,7 @@ CREATE TABLE "worktrees" (
"scan_id" INTEGER NOT NULL,
"is_complete" BOOL NOT NULL DEFAULT FALSE,
"completed_scan_id" INTEGER NOT NULL,
+ "root_repo_common_dir" VARCHAR,
PRIMARY KEY (project_id, id)
);
diff --git a/crates/collab/migrations/20251208000000_test_schema.sql b/crates/collab/migrations/20251208000000_test_schema.sql
index 394deaf2c0d6a80a2ab6ab1b95a333081c816e23..0110dd149b1143a3edcf76a1e0b18fbf1a22287c 100644
--- a/crates/collab/migrations/20251208000000_test_schema.sql
+++ b/crates/collab/migrations/20251208000000_test_schema.sql
@@ -332,7 +332,8 @@ CREATE TABLE public.projects (
room_id integer,
host_connection_id integer,
host_connection_server_id integer,
- windows_paths boolean DEFAULT false
+ windows_paths boolean DEFAULT false,
+ features text NOT NULL DEFAULT ''
);
CREATE SEQUENCE public.projects_id_seq
@@ -483,7 +484,8 @@ CREATE TABLE public.worktrees (
visible boolean NOT NULL,
scan_id bigint NOT NULL,
is_complete boolean DEFAULT false NOT NULL,
- completed_scan_id bigint
+ completed_scan_id bigint,
+ root_repo_common_dir character varying
);
ALTER TABLE ONLY public.breakpoints ALTER COLUMN id SET DEFAULT nextval('public.breakpoints_id_seq'::regclass);
diff --git a/crates/collab/src/db.rs b/crates/collab/src/db.rs
index d8803c253f5feef8ef5e040f3ea112abcc688f52..44abc37af66e3f169d3af1a7d5e29063e382c620 100644
--- a/crates/collab/src/db.rs
+++ b/crates/collab/src/db.rs
@@ -559,6 +559,7 @@ pub struct RejoinedWorktree {
pub settings_files: Vec,
pub scan_id: u64,
pub completed_scan_id: u64,
+ pub root_repo_common_dir: Option,
}
pub struct LeftRoom {
@@ -589,6 +590,7 @@ pub struct Project {
pub repositories: Vec,
pub language_servers: Vec,
pub path_style: PathStyle,
+ pub features: Vec,
}
pub struct ProjectCollaborator {
@@ -637,6 +639,7 @@ pub struct Worktree {
pub settings_files: Vec,
pub scan_id: u64,
pub completed_scan_id: u64,
+ pub root_repo_common_dir: Option,
}
#[derive(Debug)]
diff --git a/crates/collab/src/db/queries/projects.rs b/crates/collab/src/db/queries/projects.rs
index 71365fb3846c1dccbf527d76779ed8816bde243b..b1ea638072a30d6b881a711448223449aa9f53e2 100644
--- a/crates/collab/src/db/queries/projects.rs
+++ b/crates/collab/src/db/queries/projects.rs
@@ -34,6 +34,7 @@ impl Database {
worktrees: &[proto::WorktreeMetadata],
is_ssh_project: bool,
windows_paths: bool,
+ features: &[String],
) -> Result> {
self.room_transaction(room_id, |tx| async move {
let participant = room_participant::Entity::find()
@@ -71,6 +72,7 @@ impl Database {
))),
id: ActiveValue::NotSet,
windows_paths: ActiveValue::set(windows_paths),
+ features: ActiveValue::set(serde_json::to_string(features).unwrap()),
}
.insert(&*tx)
.await?;
@@ -85,6 +87,7 @@ impl Database {
visible: ActiveValue::set(worktree.visible),
scan_id: ActiveValue::set(0),
completed_scan_id: ActiveValue::set(0),
+ root_repo_common_dir: ActiveValue::set(None),
}
}))
.exec(&*tx)
@@ -201,6 +204,7 @@ impl Database {
visible: ActiveValue::set(worktree.visible),
scan_id: ActiveValue::set(0),
completed_scan_id: ActiveValue::set(0),
+ root_repo_common_dir: ActiveValue::set(None),
}))
.on_conflict(
OnConflict::columns([worktree::Column::ProjectId, worktree::Column::Id])
@@ -264,6 +268,7 @@ impl Database {
ActiveValue::default()
},
abs_path: ActiveValue::set(update.abs_path.clone()),
+ root_repo_common_dir: ActiveValue::set(update.root_repo_common_dir.clone()),
..Default::default()
})
.exec(&*tx)
@@ -759,6 +764,7 @@ impl Database {
settings_files: Default::default(),
scan_id: db_worktree.scan_id as u64,
completed_scan_id: db_worktree.completed_scan_id as u64,
+ root_repo_common_dir: db_worktree.root_repo_common_dir,
legacy_repository_entries: Default::default(),
},
)
@@ -948,6 +954,7 @@ impl Database {
} else {
PathStyle::Posix
};
+ let features: Vec = serde_json::from_str(&project.features).unwrap_or_default();
let project = Project {
id: project.id,
@@ -977,6 +984,7 @@ impl Database {
})
.collect(),
path_style,
+ features,
};
Ok((project, replica_id as ReplicaId))
}
diff --git a/crates/collab/src/db/queries/rooms.rs b/crates/collab/src/db/queries/rooms.rs
index 3197d142cba7a1969e6fdb9423dc94497f6ca53c..94e003fd2d27c97a53f66606d11ed2e15609b728 100644
--- a/crates/collab/src/db/queries/rooms.rs
+++ b/crates/collab/src/db/queries/rooms.rs
@@ -629,6 +629,7 @@ impl Database {
settings_files: Default::default(),
scan_id: db_worktree.scan_id as u64,
completed_scan_id: db_worktree.completed_scan_id as u64,
+ root_repo_common_dir: db_worktree.root_repo_common_dir,
};
let rejoined_worktree = rejoined_project
diff --git a/crates/collab/src/db/tables/project.rs b/crates/collab/src/db/tables/project.rs
index 11a9b972ebcd7af29d6e6c234096384ce9ff7701..76d399cfc6445ca7c2516cc4cd76e885230868af 100644
--- a/crates/collab/src/db/tables/project.rs
+++ b/crates/collab/src/db/tables/project.rs
@@ -13,6 +13,7 @@ pub struct Model {
pub host_connection_id: Option,
pub host_connection_server_id: Option,
pub windows_paths: bool,
+ pub features: String,
}
impl Model {
diff --git a/crates/collab/src/db/tables/worktree.rs b/crates/collab/src/db/tables/worktree.rs
index 46d9877dff152cdc3b30531606febec65595fec1..f67a9749a48e51fce81f97ad2faf8609c50a0204 100644
--- a/crates/collab/src/db/tables/worktree.rs
+++ b/crates/collab/src/db/tables/worktree.rs
@@ -15,6 +15,7 @@ pub struct Model {
pub scan_id: i64,
/// The last scan that fully completed.
pub completed_scan_id: i64,
+ pub root_repo_common_dir: Option,
}
#[derive(Copy, Clone, Debug, EnumIter, DeriveRelation)]
diff --git a/crates/collab/src/rpc.rs b/crates/collab/src/rpc.rs
index 3c4efe0580c18c938f8245de9f40bf216bab9c81..20316fc3403de0e6212d13d455c5b619000d71b1 100644
--- a/crates/collab/src/rpc.rs
+++ b/crates/collab/src/rpc.rs
@@ -435,6 +435,7 @@ impl Server {
.add_request_handler(forward_mutating_project_request::)
.add_request_handler(forward_mutating_project_request::)
.add_request_handler(forward_read_only_project_request::)
+ .add_request_handler(forward_read_only_project_request::)
.add_request_handler(forward_mutating_project_request::)
.add_request_handler(disallow_guest_request::)
.add_request_handler(disallow_guest_request::)
@@ -1485,6 +1486,7 @@ fn notify_rejoined_projects(
worktree_id: worktree.id,
abs_path: worktree.abs_path.clone(),
root_name: worktree.root_name,
+ root_repo_common_dir: worktree.root_repo_common_dir,
updated_entries: worktree.updated_entries,
removed_entries: worktree.removed_entries,
scan_id: worktree.scan_id,
@@ -1775,6 +1777,7 @@ async fn share_project(
&request.worktrees,
request.is_ssh_project,
request.windows_paths.unwrap_or(false),
+ &request.features,
)
.await?;
response.send(proto::ShareProjectResponse {
@@ -1840,6 +1843,28 @@ async fn join_project(
tracing::info!(%project_id, "join project");
let db = session.db().await;
+ let project_model = db.get_project(project_id).await?;
+ let host_features: Vec =
+ serde_json::from_str(&project_model.features).unwrap_or_default();
+ let guest_features: HashSet<_> = request.features.iter().collect();
+ let host_features_set: HashSet<_> = host_features.iter().collect();
+ if guest_features != host_features_set {
+ let host_connection_id = project_model.host_connection()?;
+ let mut pool = session.connection_pool().await;
+ let host_version = pool
+ .connection(host_connection_id)
+ .map(|c| c.zed_version.to_string());
+ let guest_version = pool
+ .connection(session.connection_id)
+ .map(|c| c.zed_version.to_string());
+ drop(pool);
+ Err(anyhow!(
+ "The host (v{}) and guest (v{}) are using incompatible versions of Zed. The peer with the older version must update to collaborate.",
+ host_version.as_deref().unwrap_or("unknown"),
+ guest_version.as_deref().unwrap_or("unknown"),
+ ))?;
+ }
+
let (project, replica_id) = &mut *db
.join_project(
project_id,
@@ -1850,6 +1875,7 @@ async fn join_project(
)
.await?;
drop(db);
+
tracing::info!(%project_id, "join remote project");
let collaborators = project
.collaborators
@@ -1909,6 +1935,7 @@ async fn join_project(
language_server_capabilities,
role: project.role.into(),
windows_paths: project.path_style == PathStyle::Windows,
+ features: project.features.clone(),
})?;
for (worktree_id, worktree) in mem::take(&mut project.worktrees) {
@@ -1918,6 +1945,7 @@ async fn join_project(
worktree_id,
abs_path: worktree.abs_path.clone(),
root_name: worktree.root_name,
+ root_repo_common_dir: worktree.root_repo_common_dir,
updated_entries: worktree.entries,
removed_entries: Default::default(),
scan_id: worktree.scan_id,
diff --git a/crates/collab/tests/integration/channel_buffer_tests.rs b/crates/collab/tests/integration/channel_buffer_tests.rs
index a5aca7dd82ca23b1c348bea1fff5d2da2870c654..dd8ae9a2c02cfae6c6c7e8c369308c5092be113e 100644
--- a/crates/collab/tests/integration/channel_buffer_tests.rs
+++ b/crates/collab/tests/integration/channel_buffer_tests.rs
@@ -313,7 +313,7 @@ fn assert_remote_selections(
let snapshot = editor.snapshot(window, cx);
let hub = editor.collaboration_hub().unwrap();
let collaborators = hub.collaborators(cx);
- let range = Anchor::min()..Anchor::max();
+ let range = Anchor::Min..Anchor::Max;
let remote_selections = snapshot
.remote_selections_in_range(&range, hub, cx)
.map(|s| {
diff --git a/crates/collab/tests/integration/db_tests/db_tests.rs b/crates/collab/tests/integration/db_tests/db_tests.rs
index e2006b7fb9984c4bd0cf16a62e9321b2f7007e9e..710f95dbf7d82e05a541b844b093a04ca88565f7 100644
--- a/crates/collab/tests/integration/db_tests/db_tests.rs
+++ b/crates/collab/tests/integration/db_tests/db_tests.rs
@@ -350,20 +350,41 @@ async fn test_project_count(db: &Arc) {
.unwrap();
assert_eq!(db.project_count_excluding_admins().await.unwrap(), 0);
- db.share_project(room_id, ConnectionId { owner_id, id: 1 }, &[], false, false)
- .await
- .unwrap();
+ db.share_project(
+ room_id,
+ ConnectionId { owner_id, id: 1 },
+ &[],
+ false,
+ false,
+ &[],
+ )
+ .await
+ .unwrap();
assert_eq!(db.project_count_excluding_admins().await.unwrap(), 1);
- db.share_project(room_id, ConnectionId { owner_id, id: 1 }, &[], false, false)
- .await
- .unwrap();
+ db.share_project(
+ room_id,
+ ConnectionId { owner_id, id: 1 },
+ &[],
+ false,
+ false,
+ &[],
+ )
+ .await
+ .unwrap();
assert_eq!(db.project_count_excluding_admins().await.unwrap(), 2);
// Projects shared by admins aren't counted.
- db.share_project(room_id, ConnectionId { owner_id, id: 0 }, &[], false, false)
- .await
- .unwrap();
+ db.share_project(
+ room_id,
+ ConnectionId { owner_id, id: 0 },
+ &[],
+ false,
+ false,
+ &[],
+ )
+ .await
+ .unwrap();
assert_eq!(db.project_count_excluding_admins().await.unwrap(), 2);
db.leave_room(ConnectionId { owner_id, id: 1 })
diff --git a/crates/collab/tests/integration/following_tests.rs b/crates/collab/tests/integration/following_tests.rs
index c4031788c87f747c3125f4dbc509d68ea3720b43..7109b0f31452d2573426aa2300e7967b8f5a6601 100644
--- a/crates/collab/tests/integration/following_tests.rs
+++ b/crates/collab/tests/integration/following_tests.rs
@@ -2184,6 +2184,7 @@ async fn test_following_after_replacement(cx_a: &mut TestAppContext, cx_b: &mut
);
mb
});
+ let multibuffer_snapshot = multibuffer.update(cx_a, |mb, cx| mb.snapshot(cx));
let snapshot = buffer.update(cx_a, |buffer, _| buffer.snapshot());
let editor: Entity