Detailed changes
@@ -97,6 +97,8 @@ jobs:
with:
app-id: ${{ secrets.ZED_ZIPPY_APP_ID }}
private-key: ${{ secrets.ZED_ZIPPY_APP_PRIVATE_KEY }}
+ permission-contents: write
+ permission-workflows: write
- name: steps::checkout_repo
uses: actions/checkout@93cb6efe18208431cddfb8368fd83d5badbf9bfd
with:
@@ -35,6 +35,9 @@ jobs:
with:
app-id: ${{ secrets.ZED_ZIPPY_APP_ID }}
private-key: ${{ secrets.ZED_ZIPPY_APP_PRIVATE_KEY }}
+ permission-contents: write
+ permission-workflows: write
+ permission-pull-requests: write
- name: cherry_pick::run_cherry_pick::cherry_pick
run: ./script/cherry-pick "$BRANCH" "$COMMIT" "$CHANNEL"
env:
@@ -0,0 +1,55 @@
+# Generated from xtask::workflows::compliance_check
+# Rebuild with `cargo xtask workflows`.
+name: compliance_check
+env:
+ CARGO_TERM_COLOR: always
+on:
+ schedule:
+ - cron: '30 17 * * 2'
+jobs:
+ scheduled_compliance_check:
+ if: (github.repository_owner == 'zed-industries' || github.repository_owner == 'zed-extensions')
+ runs-on: namespace-profile-2x4-ubuntu-2404
+ steps:
+ - name: steps::checkout_repo
+ uses: actions/checkout@93cb6efe18208431cddfb8368fd83d5badbf9bfd
+ with:
+ clean: false
+ fetch-depth: 0
+ - name: steps::cache_rust_dependencies_namespace
+ uses: namespacelabs/nscloud-cache-action@a90bb5d4b27522ce881c6e98eebd7d7e6d1653f9
+ with:
+ cache: rust
+ path: ~/.rustup
+ - id: determine-version
+ name: compliance_check::scheduled_compliance_check
+ run: |
+ VERSION=$(sed -n 's/^version = "\(.*\)"/\1/p' crates/zed/Cargo.toml | tr -d '[:space:]')
+ if [ -z "$VERSION" ]; then
+ echo "Could not determine version from crates/zed/Cargo.toml"
+ exit 1
+ fi
+ TAG="v${VERSION}-pre"
+ echo "Checking compliance for $TAG"
+ echo "tag=$TAG" >> "$GITHUB_OUTPUT"
+ - id: run-compliance-check
+ name: compliance_check::scheduled_compliance_check::run_compliance_check
+ run: cargo xtask compliance "$LATEST_TAG" --branch main --report-path target/compliance-report
+ env:
+ LATEST_TAG: ${{ steps.determine-version.outputs.tag }}
+ GITHUB_APP_ID: ${{ secrets.ZED_ZIPPY_APP_ID }}
+ GITHUB_APP_KEY: ${{ secrets.ZED_ZIPPY_APP_PRIVATE_KEY }}
+ - name: compliance_check::scheduled_compliance_check::send_failure_slack_notification
+ if: failure()
+ run: |
+ MESSAGE="⚠️ Scheduled compliance check failed for upcoming preview release $LATEST_TAG: There are PRs with missing reviews."
+
+ curl -X POST -H 'Content-type: application/json' \
+ --data "$(jq -n --arg text "$MESSAGE" '{"text": $text}')" \
+ "$SLACK_WEBHOOK"
+ env:
+ SLACK_WEBHOOK: ${{ secrets.SLACK_WEBHOOK_WORKFLOW_FAILURES }}
+ LATEST_TAG: ${{ steps.determine-version.outputs.tag }}
+defaults:
+ run:
+ shell: bash -euxo pipefail {0}
@@ -293,6 +293,51 @@ jobs:
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
timeout-minutes: 60
+ compliance_check:
+ if: (github.repository_owner == 'zed-industries' || github.repository_owner == 'zed-extensions')
+ runs-on: namespace-profile-16x32-ubuntu-2204
+ env:
+ COMPLIANCE_FILE_PATH: compliance.md
+ steps:
+ - name: steps::checkout_repo
+ uses: actions/checkout@93cb6efe18208431cddfb8368fd83d5badbf9bfd
+ with:
+ clean: false
+ fetch-depth: 0
+ ref: ${{ github.ref }}
+ - name: steps::cache_rust_dependencies_namespace
+ uses: namespacelabs/nscloud-cache-action@a90bb5d4b27522ce881c6e98eebd7d7e6d1653f9
+ with:
+ cache: rust
+ path: ~/.rustup
+ - id: run-compliance-check
+ name: release::compliance_check::run_compliance_check
+ run: cargo xtask compliance "$GITHUB_REF_NAME" --report-path "$COMPLIANCE_FILE_PATH"
+ env:
+ GITHUB_APP_ID: ${{ secrets.ZED_ZIPPY_APP_ID }}
+ GITHUB_APP_KEY: ${{ secrets.ZED_ZIPPY_APP_PRIVATE_KEY }}
+ - name: release::compliance_check::send_compliance_slack_notification
+ if: always()
+ run: |
+ if [ "$COMPLIANCE_OUTCOME" == "success" ]; then
+ STATUS="✅ Compliance check passed for $GITHUB_REF_NAME"
+ else
+ STATUS="❌ Compliance check failed for $GITHUB_REF_NAME"
+ fi
+
+ REPORT_CONTENT=""
+ if [ -f "$COMPLIANCE_FILE_PATH" ]; then
+ REPORT_CONTENT=$(cat "$COMPLIANCE_FILE_PATH")
+ fi
+
+ MESSAGE=$(printf "%s\n\n%s" "$STATUS" "$REPORT_CONTENT")
+
+ curl -X POST -H 'Content-type: application/json' \
+ --data "$(jq -n --arg text "$MESSAGE" '{"text": $text}')" \
+ "$SLACK_WEBHOOK"
+ env:
+ SLACK_WEBHOOK: ${{ secrets.SLACK_WEBHOOK_WORKFLOW_FAILURES }}
+ COMPLIANCE_OUTCOME: ${{ steps.run-compliance-check.outcome }}
bundle_linux_aarch64:
needs:
- run_tests_linux
@@ -613,6 +658,45 @@ jobs:
echo "All expected assets are present in the release."
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
+ - name: steps::checkout_repo
+ uses: actions/checkout@93cb6efe18208431cddfb8368fd83d5badbf9bfd
+ with:
+ clean: false
+ fetch-depth: 0
+ ref: ${{ github.ref }}
+ - name: steps::cache_rust_dependencies_namespace
+ uses: namespacelabs/nscloud-cache-action@a90bb5d4b27522ce881c6e98eebd7d7e6d1653f9
+ with:
+ cache: rust
+ path: ~/.rustup
+ - id: run-post-upload-compliance-check
+ name: release::validate_release_assets::run_post_upload_compliance_check
+ run: cargo xtask compliance "$GITHUB_REF_NAME" --report-path target/compliance-report
+ env:
+ GITHUB_APP_ID: ${{ secrets.ZED_ZIPPY_APP_ID }}
+ GITHUB_APP_KEY: ${{ secrets.ZED_ZIPPY_APP_PRIVATE_KEY }}
+ - name: release::validate_release_assets::send_post_upload_compliance_notification
+ if: always()
+ run: |
+ if [ -z "$COMPLIANCE_OUTCOME" ] || [ "$COMPLIANCE_OUTCOME" == "skipped" ]; then
+ echo "Compliance check was skipped, not sending notification"
+ exit 0
+ fi
+
+ TAG="$GITHUB_REF_NAME"
+
+ if [ "$COMPLIANCE_OUTCOME" == "success" ]; then
+ MESSAGE="✅ Post-upload compliance re-check passed for $TAG"
+ else
+ MESSAGE="❌ Post-upload compliance re-check failed for $TAG"
+ fi
+
+ curl -X POST -H 'Content-type: application/json' \
+ --data "$(jq -n --arg text "$MESSAGE" '{"text": $text}')" \
+ "$SLACK_WEBHOOK"
+ env:
+ SLACK_WEBHOOK: ${{ secrets.SLACK_WEBHOOK_WORKFLOW_FAILURES }}
+ COMPLIANCE_OUTCOME: ${{ steps.run-post-upload-compliance-check.outcome }}
auto_release_preview:
needs:
- validate_release_assets
@@ -4,13 +4,13 @@
"command": "./script/clippy",
"args": [],
"allow_concurrent_runs": true,
- "use_new_terminal": false
+ "use_new_terminal": false,
},
{
"label": "cargo run --profile release-fast",
"command": "cargo",
"args": ["run", "--profile", "release-fast"],
"allow_concurrent_runs": true,
- "use_new_terminal": false
- }
+ "use_new_terminal": false,
+ },
]
@@ -15,7 +15,7 @@ dependencies = [
"collections",
"env_logger 0.11.8",
"file_icons",
- "futures 0.3.31",
+ "futures 0.3.32",
"gpui",
"image",
"indoc",
@@ -75,7 +75,7 @@ dependencies = [
"collections",
"ctor",
"fs",
- "futures 0.3.31",
+ "futures 0.3.32",
"gpui",
"language",
"log",
@@ -100,7 +100,7 @@ dependencies = [
"editor",
"extension_host",
"fs",
- "futures 0.3.31",
+ "futures 0.3.32",
"gpui",
"language",
"project",
@@ -163,7 +163,7 @@ dependencies = [
"eval_utils",
"feature_flags",
"fs",
- "futures 0.3.31",
+ "futures 0.3.32",
"git",
"gpui",
"gpui_tokio",
@@ -227,7 +227,7 @@ dependencies = [
"async-broadcast",
"async-trait",
"derive_more",
- "futures 0.3.31",
+ "futures 0.3.32",
"log",
"serde",
"serde_json",
@@ -260,11 +260,10 @@ dependencies = [
"chrono",
"client",
"collections",
- "credentials_provider",
"env_logger 0.11.8",
"feature_flags",
"fs",
- "futures 0.3.31",
+ "futures 0.3.32",
"google_ai",
"gpui",
"gpui_tokio",
@@ -289,6 +288,7 @@ dependencies = [
"util",
"uuid",
"watch",
+ "zed_credentials_provider",
]
[[package]]
@@ -344,7 +344,7 @@ dependencies = [
"feature_flags",
"file_icons",
"fs",
- "futures 0.3.31",
+ "futures 0.3.32",
"fuzzy",
"git",
"gpui",
@@ -629,7 +629,7 @@ version = "0.1.0"
dependencies = [
"anyhow",
"chrono",
- "futures 0.3.31",
+ "futures 0.3.32",
"http_client",
"schemars",
"serde",
@@ -677,6 +677,15 @@ dependencies = [
"derive_arbitrary",
]
+[[package]]
+name = "arc-swap"
+version = "1.9.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "a07d1f37ff60921c83bdfc7407723bdefe89b44b98a9b772f225c8f9d67141a6"
+dependencies = [
+ "rustversion",
+]
+
[[package]]
name = "arg_enum_proc_macro"
version = "0.3.4"
@@ -750,7 +759,7 @@ name = "askpass"
version = "0.1.0"
dependencies = [
"anyhow",
- "futures 0.3.31",
+ "futures 0.3.32",
"gpui",
"log",
"net",
@@ -945,7 +954,7 @@ name = "async-pipe"
version = "0.1.3"
source = "git+https://github.com/zed-industries/async-pipe-rs?rev=82d00a04211cf4e1236029aa03e6b6ce2a74c553#82d00a04211cf4e1236029aa03e6b6ce2a74c553"
dependencies = [
- "futures 0.3.31",
+ "futures 0.3.32",
"log",
]
@@ -1183,7 +1192,7 @@ dependencies = [
"clock",
"ctor",
"db",
- "futures 0.3.31",
+ "futures 0.3.32",
"futures-lite 1.13.0",
"gpui",
"http_client",
@@ -1862,7 +1871,7 @@ dependencies = [
"anyhow",
"aws-sdk-bedrockruntime",
"aws-smithy-types",
- "futures 0.3.31",
+ "futures 0.3.32",
"schemars",
"serde",
"serde_json",
@@ -2151,7 +2160,7 @@ version = "0.1.0"
dependencies = [
"clock",
"ctor",
- "futures 0.3.31",
+ "futures 0.3.32",
"git2",
"gpui",
"language",
@@ -2348,7 +2357,7 @@ dependencies = [
"collections",
"feature_flags",
"fs",
- "futures 0.3.31",
+ "futures 0.3.32",
"gpui",
"gpui_tokio",
"language",
@@ -2530,6 +2539,16 @@ dependencies = [
"serde",
]
+[[package]]
+name = "cargo-platform"
+version = "0.3.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "87a0c0e6148f11f01f32650a2ea02d532b2ad4e81d8bd41e6e565b5adc5e6082"
+dependencies = [
+ "serde",
+ "serde_core",
+]
+
[[package]]
name = "cargo_metadata"
version = "0.19.2"
@@ -2537,7 +2556,21 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "dd5eb614ed4c27c5d706420e4320fbe3216ab31fa1c33cd8246ac36dae4479ba"
dependencies = [
"camino",
- "cargo-platform",
+ "cargo-platform 0.1.9",
+ "semver",
+ "serde",
+ "serde_json",
+ "thiserror 2.0.17",
+]
+
+[[package]]
+name = "cargo_metadata"
+version = "0.23.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "ef987d17b0a113becdd19d3d0022d04d7ef41f9efe4f3fb63ac44ba61df3ade9"
+dependencies = [
+ "camino",
+ "cargo-platform 0.3.2",
"semver",
"serde",
"serde_json",
@@ -2669,7 +2702,7 @@ dependencies = [
"client",
"clock",
"collections",
- "futures 0.3.31",
+ "futures 0.3.32",
"gpui",
"http_client",
"language",
@@ -2856,6 +2889,7 @@ dependencies = [
"chrono",
"clock",
"cloud_api_client",
+ "cloud_api_types",
"cloud_llm_client",
"collections",
"credentials_provider",
@@ -2863,12 +2897,13 @@ dependencies = [
"derive_more",
"feature_flags",
"fs",
- "futures 0.3.31",
+ "futures 0.3.32",
"gpui",
"gpui_tokio",
"http_client",
"http_client_tls",
"httparse",
+ "language_model",
"log",
"objc2-foundation",
"parking_lot",
@@ -2900,6 +2935,7 @@ dependencies = [
"util",
"windows 0.61.3",
"worktree",
+ "zed_credentials_provider",
]
[[package]]
@@ -2917,7 +2953,7 @@ version = "0.1.0"
dependencies = [
"anyhow",
"cloud_api_types",
- "futures 0.3.31",
+ "futures 0.3.32",
"gpui",
"gpui_tokio",
"http_client",
@@ -3049,7 +3085,7 @@ dependencies = [
"anyhow",
"edit_prediction",
"edit_prediction_types",
- "futures 0.3.31",
+ "futures 0.3.32",
"gpui",
"http_client",
"icons",
@@ -3059,6 +3095,7 @@ dependencies = [
"serde",
"serde_json",
"text",
+ "zed_credentials_provider",
"zeta_prompt",
]
@@ -3095,7 +3132,7 @@ dependencies = [
"extension",
"file_finder",
"fs",
- "futures 0.3.31",
+ "futures 0.3.32",
"git",
"git_hosting_providers",
"git_ui",
@@ -3172,7 +3209,7 @@ dependencies = [
"collections",
"db",
"editor",
- "futures 0.3.31",
+ "futures 0.3.32",
"fuzzy",
"gpui",
"livekit_client",
@@ -3280,6 +3317,25 @@ dependencies = [
"workspace",
]
+[[package]]
+name = "compliance"
+version = "0.1.0"
+dependencies = [
+ "anyhow",
+ "async-trait",
+ "derive_more",
+ "futures 0.3.32",
+ "indoc",
+ "itertools 0.14.0",
+ "jsonwebtoken",
+ "octocrab",
+ "regex",
+ "semver",
+ "serde",
+ "serde_json",
+ "tokio",
+]
+
[[package]]
name = "component"
version = "0.1.0"
@@ -3433,7 +3489,7 @@ dependencies = [
"async-trait",
"base64 0.22.1",
"collections",
- "futures 0.3.31",
+ "futures 0.3.32",
"gpui",
"http_client",
"log",
@@ -3494,7 +3550,7 @@ dependencies = [
"edit_prediction_types",
"editor",
"fs",
- "futures 0.3.31",
+ "futures 0.3.32",
"gpui",
"icons",
"indoc",
@@ -3528,7 +3584,7 @@ dependencies = [
"collections",
"dirs 4.0.0",
"fs",
- "futures 0.3.31",
+ "futures 0.3.32",
"gpui",
"http_client",
"log",
@@ -3978,7 +4034,7 @@ version = "0.1.0"
dependencies = [
"cfg-if",
"crash-handler",
- "futures 0.3.31",
+ "futures 0.3.32",
"log",
"mach2 0.5.0",
"minidumper",
@@ -4035,12 +4091,8 @@ name = "credentials_provider"
version = "0.1.0"
dependencies = [
"anyhow",
- "futures 0.3.31",
"gpui",
- "paths",
- "release_channel",
"serde",
- "serde_json",
]
[[package]]
@@ -4318,7 +4370,7 @@ dependencies = [
"collections",
"dap-types",
"fs",
- "futures 0.3.31",
+ "futures 0.3.32",
"gpui",
"http_client",
"language",
@@ -4360,7 +4412,7 @@ dependencies = [
"dap",
"dotenvy",
"fs",
- "futures 0.3.31",
+ "futures 0.3.32",
"gpui",
"http_client",
"json_dotpath",
@@ -4531,7 +4583,7 @@ dependencies = [
"anyhow",
"dap",
"editor",
- "futures 0.3.31",
+ "futures 0.3.32",
"gpui",
"project",
"serde_json",
@@ -4558,7 +4610,7 @@ dependencies = [
"editor",
"feature_flags",
"file_icons",
- "futures 0.3.31",
+ "futures 0.3.32",
"fuzzy",
"gpui",
"hex",
@@ -4613,7 +4665,7 @@ name = "deepseek"
version = "0.1.0"
dependencies = [
"anyhow",
- "futures 0.3.31",
+ "futures 0.3.32",
"http_client",
"schemars",
"serde",
@@ -4733,7 +4785,7 @@ dependencies = [
"async-trait",
"env_logger 0.11.8",
"fs",
- "futures 0.3.31",
+ "futures 0.3.32",
"gpui",
"http 1.3.1",
"http_client",
@@ -5115,13 +5167,14 @@ dependencies = [
"collections",
"copilot",
"copilot_ui",
+ "credentials_provider",
"ctor",
"db",
"edit_prediction_context",
"edit_prediction_types",
"feature_flags",
"fs",
- "futures 0.3.31",
+ "futures 0.3.32",
"gpui",
"heapless",
"indoc",
@@ -5157,6 +5210,7 @@ dependencies = [
"workspace",
"worktree",
"zed_actions",
+ "zed_credentials_provider",
"zeta_prompt",
"zlog",
"zstd",
@@ -5173,6 +5227,7 @@ dependencies = [
"client",
"cloud_llm_client",
"collections",
+ "criterion",
"db",
"debug_adapter_extension",
"dirs 4.0.0",
@@ -5180,7 +5235,7 @@ dependencies = [
"extension",
"flate2",
"fs",
- "futures 0.3.31",
+ "futures 0.3.32",
"gaoya",
"gpui",
"gpui_platform",
@@ -5232,7 +5287,7 @@ dependencies = [
"clock",
"collections",
"env_logger 0.11.8",
- "futures 0.3.31",
+ "futures 0.3.32",
"gpui",
"indoc",
"language",
@@ -5281,7 +5336,7 @@ dependencies = [
"editor",
"feature_flags",
"fs",
- "futures 0.3.31",
+ "futures 0.3.32",
"gpui",
"indoc",
"language",
@@ -5326,7 +5381,7 @@ dependencies = [
"feature_flags",
"file_icons",
"fs",
- "futures 0.3.31",
+ "futures 0.3.32",
"fuzzy",
"git",
"gpui",
@@ -5582,6 +5637,13 @@ dependencies = [
"log",
]
+[[package]]
+name = "env_var"
+version = "0.1.0"
+dependencies = [
+ "gpui",
+]
+
[[package]]
name = "envy"
version = "0.4.2"
@@ -5733,7 +5795,7 @@ dependencies = [
"extension",
"feature_flags",
"fs",
- "futures 0.3.31",
+ "futures 0.3.32",
"gpui",
"gpui_platform",
"gpui_tokio",
@@ -5843,7 +5905,7 @@ dependencies = [
"collections",
"dap",
"fs",
- "futures 0.3.31",
+ "futures 0.3.32",
"gpui",
"heck 0.5.0",
"http_client",
@@ -5911,7 +5973,7 @@ dependencies = [
"dap",
"extension",
"fs",
- "futures 0.3.31",
+ "futures 0.3.32",
"gpui",
"gpui_tokio",
"http_client",
@@ -6119,7 +6181,7 @@ dependencies = [
"ctor",
"editor",
"file_icons",
- "futures 0.3.31",
+ "futures 0.3.32",
"fuzzy",
"gpui",
"menu",
@@ -6421,7 +6483,7 @@ dependencies = [
"collections",
"dunce",
"fs",
- "futures 0.3.31",
+ "futures 0.3.32",
"git",
"gpui",
"ignore",
@@ -6519,9 +6581,9 @@ checksum = "3a471a38ef8ed83cd6e40aa59c1ffe17db6855c18e3604d9c4ed8c08ebc28678"
[[package]]
name = "futures"
-version = "0.3.31"
+version = "0.3.32"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "65bc07b1a8bc7c85c5f2e110c476c7389b4554ba72af57d8445ea63a576b0876"
+checksum = "8b147ee9d1f6d097cef9ce628cd2ee62288d963e16fb287bd9286455b241382d"
dependencies = [
"futures-channel",
"futures-core",
@@ -6534,9 +6596,9 @@ dependencies = [
[[package]]
name = "futures-channel"
-version = "0.3.31"
+version = "0.3.32"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "2dff15bf788c671c1934e366d07e30c1814a8ef514e1af724a602e8a2fbe1b10"
+checksum = "07bbe89c50d7a535e539b8c17bc0b49bdb77747034daa8087407d655f3f7cc1d"
dependencies = [
"futures-core",
"futures-sink",
@@ -6557,15 +6619,15 @@ dependencies = [
[[package]]
name = "futures-core"
-version = "0.3.31"
+version = "0.3.32"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "05f29059c0c2090612e8d742178b0580d2dc940c837851ad723096f87af6663e"
+checksum = "7e3450815272ef58cec6d564423f6e755e25379b217b0bc688e295ba24df6b1d"
[[package]]
name = "futures-executor"
-version = "0.3.31"
+version = "0.3.32"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "1e28d1d997f585e54aebc3f97d39e72338912123a67330d723fdbb564d646c9f"
+checksum = "baf29c38818342a3b26b5b923639e7b1f4a61fc5e76102d4b1981c6dc7a7579d"
dependencies = [
"futures-core",
"futures-task",
@@ -6585,9 +6647,9 @@ dependencies = [
[[package]]
name = "futures-io"
-version = "0.3.31"
+version = "0.3.32"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "9e5c1b78ca4aae1ac06c48a526a655760685149f0d465d21f37abfe57ce075c6"
+checksum = "cecba35d7ad927e23624b22ad55235f2239cfa44fd10428eecbeba6d6a717718"
[[package]]
name = "futures-lite"
@@ -6619,9 +6681,9 @@ dependencies = [
[[package]]
name = "futures-macro"
-version = "0.3.31"
+version = "0.3.32"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "162ee34ebcb7c64a8abebc059ce0fee27c2262618d7b60ed8faf72fef13c3650"
+checksum = "e835b70203e41293343137df5c0664546da5745f82ec9b84d40be8336958447b"
dependencies = [
"proc-macro2",
"quote",
@@ -6630,21 +6692,21 @@ dependencies = [
[[package]]
name = "futures-sink"
-version = "0.3.31"
+version = "0.3.32"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "e575fab7d1e0dcb8d0c7bcf9a63ee213816ab51902e6d244a95819acacf1d4f7"
+checksum = "c39754e157331b013978ec91992bde1ac089843443c49cbc7f46150b0fad0893"
[[package]]
name = "futures-task"
-version = "0.3.31"
+version = "0.3.32"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "f90f7dce0722e95104fcb095585910c0977252f286e354b5e3bd38902cd99988"
+checksum = "037711b3d59c33004d3856fbdc83b99d4ff37a24768fa1be9ce3538a1cde4393"
[[package]]
name = "futures-util"
-version = "0.3.31"
+version = "0.3.32"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "9fa08315bb612088cc391249efdc3bc77536f16c91f6cf495e6fbe85b20a4a81"
+checksum = "389ca41296e6190b48053de0321d02a77f32f8a5d2461dd38762c0593805c6d6"
dependencies = [
"futures 0.1.31",
"futures-channel",
@@ -6653,9 +6715,9 @@ dependencies = [
"futures-macro",
"futures-sink",
"futures-task",
+ "libc",
"memchr",
"pin-project-lite",
- "pin-utils",
"slab",
"tokio-io",
]
@@ -7082,7 +7144,7 @@ dependencies = [
"async-trait",
"collections",
"derive_more",
- "futures 0.3.31",
+ "futures 0.3.32",
"git2",
"gpui",
"http_client",
@@ -7131,7 +7193,6 @@ dependencies = [
"collections",
"db",
"editor",
- "feature_flags",
"fs",
"git",
"git_ui",
@@ -7159,7 +7220,7 @@ version = "0.1.0"
dependencies = [
"anyhow",
"async-trait",
- "futures 0.3.31",
+ "futures 0.3.32",
"git",
"gpui",
"http_client",
@@ -7189,9 +7250,8 @@ dependencies = [
"ctor",
"db",
"editor",
- "feature_flags",
"file_icons",
- "futures 0.3.31",
+ "futures 0.3.32",
"fuzzy",
"git",
"gpui",
@@ -7396,7 +7456,7 @@ name = "google_ai"
version = "0.1.0"
dependencies = [
"anyhow",
- "futures 0.3.31",
+ "futures 0.3.32",
"http_client",
"schemars",
"serde",
@@ -7466,7 +7526,7 @@ dependencies = [
"env_logger 0.11.8",
"etagere",
"foreign-types 0.5.0",
- "futures 0.3.31",
+ "futures 0.3.32",
"futures-concurrency",
"getrandom 0.3.4",
"gpui_macros",
@@ -7541,7 +7601,7 @@ dependencies = [
"calloop-wayland-source",
"collections",
"filedescriptor",
- "futures 0.3.31",
+ "futures 0.3.32",
"gpui",
"gpui_wgpu",
"http_client",
@@ -7595,7 +7655,7 @@ dependencies = [
"dispatch2",
"etagere",
"foreign-types 0.5.0",
- "futures 0.3.31",
+ "futures 0.3.32",
"gpui",
"image",
"itertools 0.14.0",
@@ -7664,7 +7724,7 @@ version = "0.1.0"
dependencies = [
"anyhow",
"console_error_panic_hook",
- "futures 0.3.31",
+ "futures 0.3.32",
"gpui",
"gpui_wgpu",
"http_client",
@@ -7715,7 +7775,7 @@ dependencies = [
"anyhow",
"collections",
"etagere",
- "futures 0.3.31",
+ "futures 0.3.32",
"gpui",
"image",
"itertools 0.14.0",
@@ -8199,7 +8259,7 @@ dependencies = [
"async-tar",
"bytes 1.11.1",
"derive_more",
- "futures 0.3.31",
+ "futures 0.3.32",
"http 1.3.1",
"http-body 1.0.1",
"log",
@@ -8316,6 +8376,7 @@ dependencies = [
"http 1.3.1",
"hyper 1.7.0",
"hyper-util",
+ "log",
"rustls 0.23.33",
"rustls-native-certs 0.8.2",
"rustls-pki-types",
@@ -8324,6 +8385,19 @@ dependencies = [
"tower-service",
]
+[[package]]
+name = "hyper-timeout"
+version = "0.5.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "2b90d566bffbce6a75bd8b09a05aa8c2cb1fabb6cb348f8840c9e4c90a0d83b0"
+dependencies = [
+ "hyper 1.7.0",
+ "hyper-util",
+ "pin-project-lite",
+ "tokio",
+ "tower-service",
+]
+
[[package]]
name = "hyper-tls"
version = "0.5.0"
@@ -9082,7 +9156,7 @@ dependencies = [
"async-trait",
"bytes 1.11.1",
"chrono",
- "futures 0.3.31",
+ "futures 0.3.32",
"serde",
"serde_json",
"thiserror 2.0.17",
@@ -9098,7 +9172,7 @@ dependencies = [
"anyhow",
"async-trait",
"async-tungstenite",
- "futures 0.3.31",
+ "futures 0.3.32",
"jupyter-protocol",
"serde",
"serde_json",
@@ -9216,7 +9290,7 @@ dependencies = [
"ec4rs",
"encoding_rs",
"fs",
- "futures 0.3.31",
+ "futures 0.3.32",
"fuzzy",
"globset",
"gpui",
@@ -9296,7 +9370,7 @@ dependencies = [
"collections",
"extension",
"fs",
- "futures 0.3.31",
+ "futures 0.3.32",
"gpui",
"language",
"log",
@@ -9316,13 +9390,13 @@ dependencies = [
"anthropic",
"anyhow",
"base64 0.22.1",
- "client",
"cloud_api_client",
"cloud_api_types",
"cloud_llm_client",
"collections",
"credentials_provider",
- "futures 0.3.31",
+ "env_var",
+ "futures 0.3.32",
"gpui",
"http_client",
"icons",
@@ -9337,7 +9411,6 @@ dependencies = [
"smol",
"thiserror 2.0.17",
"util",
- "zed_env_vars",
]
[[package]]
@@ -9366,7 +9439,7 @@ dependencies = [
"extension",
"extension_host",
"fs",
- "futures 0.3.31",
+ "futures 0.3.32",
"google_ai",
"gpui",
"gpui_tokio",
@@ -9443,7 +9516,7 @@ dependencies = [
"command_palette_hooks",
"edit_prediction",
"editor",
- "futures 0.3.31",
+ "futures 0.3.32",
"gpui",
"itertools 0.14.0",
"language",
@@ -9479,7 +9552,7 @@ dependencies = [
"chrono",
"collections",
"fs",
- "futures 0.3.31",
+ "futures 0.3.32",
"globset",
"gpui",
"grammars",
@@ -9866,7 +9939,7 @@ dependencies = [
"core-video",
"coreaudio-rs 0.12.1",
"cpal",
- "futures 0.3.31",
+ "futures 0.3.32",
"gpui",
"gpui_platform",
"gpui_tokio",
@@ -9910,7 +9983,7 @@ name = "lmstudio"
version = "0.1.0"
dependencies = [
"anyhow",
- "futures 0.3.31",
+ "futures 0.3.32",
"http_client",
"schemars",
"serde",
@@ -9981,7 +10054,7 @@ dependencies = [
"async-pipe",
"collections",
"ctor",
- "futures 0.3.31",
+ "futures 0.3.32",
"gpui",
"gpui_util",
"log",
@@ -10001,7 +10074,7 @@ dependencies = [
[[package]]
name = "lsp-types"
version = "0.95.1"
-source = "git+https://github.com/zed-industries/lsp-types?rev=a4f410987660bf560d1e617cb78117c6b6b9f599#a4f410987660bf560d1e617cb78117c6b6b9f599"
+source = "git+https://github.com/zed-industries/lsp-types?rev=c7396459fefc7886b4adfa3b596832405ae1e880#c7396459fefc7886b4adfa3b596832405ae1e880"
dependencies = [
"bitflags 1.3.2",
"serde",
@@ -10121,7 +10194,7 @@ dependencies = [
"collections",
"env_logger 0.11.8",
"fs",
- "futures 0.3.31",
+ "futures 0.3.32",
"gpui",
"gpui_platform",
"html5ever 0.27.0",
@@ -10152,6 +10225,7 @@ dependencies = [
"language",
"log",
"markdown",
+ "project",
"settings",
"tempfile",
"theme_settings",
@@ -10568,7 +10642,7 @@ name = "mistral"
version = "0.1.0"
dependencies = [
"anyhow",
- "futures 0.3.31",
+ "futures 0.3.32",
"http_client",
"schemars",
"serde",
@@ -10757,7 +10831,7 @@ name = "nc"
version = "0.1.0"
dependencies = [
"anyhow",
- "futures 0.3.31",
+ "futures 0.3.32",
"net",
"smol",
]
@@ -10853,7 +10927,7 @@ dependencies = [
"async-std",
"async-tar",
"async-trait",
- "futures 0.3.31",
+ "futures 0.3.32",
"http_client",
"log",
"paths",
@@ -11177,7 +11251,7 @@ version = "0.9.2"
source = "git+https://github.com/KillTheMule/nvim-rs?rev=764dd270c642f77f10f3e19d05cc178a6cbe69f3#764dd270c642f77f10f3e19d05cc178a6cbe69f3"
dependencies = [
"async-trait",
- "futures 0.3.31",
+ "futures 0.3.32",
"log",
"rmp",
"rmpv",
@@ -11372,12 +11446,54 @@ dependencies = [
"memchr",
]
+[[package]]
+name = "octocrab"
+version = "0.49.7"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "63f6687a23731011d0117f9f4c3cdabaa7b5e42ca671f42b5cc0657c492540e3"
+dependencies = [
+ "arc-swap",
+ "async-trait",
+ "base64 0.22.1",
+ "bytes 1.11.1",
+ "cargo_metadata 0.23.1",
+ "cfg-if",
+ "chrono",
+ "either",
+ "futures 0.3.32",
+ "futures-core",
+ "futures-util",
+ "getrandom 0.2.16",
+ "http 1.3.1",
+ "http-body 1.0.1",
+ "http-body-util",
+ "hyper 1.7.0",
+ "hyper-rustls 0.27.7",
+ "hyper-timeout",
+ "hyper-util",
+ "jsonwebtoken",
+ "once_cell",
+ "percent-encoding",
+ "pin-project",
+ "secrecy",
+ "serde",
+ "serde_json",
+ "serde_path_to_error",
+ "serde_urlencoded",
+ "snafu",
+ "tokio",
+ "tower 0.5.2",
+ "tower-http 0.6.6",
+ "url",
+ "web-time",
+]
+
[[package]]
name = "ollama"
version = "0.1.0"
dependencies = [
"anyhow",
- "futures 0.3.31",
+ "futures 0.3.32",
"http_client",
"schemars",
"serde",
@@ -61,6 +61,7 @@ members = [
"crates/edit_prediction_ui",
"crates/editor",
"crates/encoding_selector",
+ "crates/env_var",
"crates/etw_tracing",
"crates/eval_cli",
"crates/eval_utils",
@@ -220,6 +221,7 @@ members = [
"crates/x_ai",
"crates/zed",
"crates/zed_actions",
+ "crates/zed_credentials_provider",
"crates/zed_env_vars",
"crates/zeta_prompt",
"crates/zlog",
@@ -240,6 +242,7 @@ members = [
# Tooling
#
+ "tooling/compliance",
"tooling/perf",
"tooling/xtask",
]
@@ -287,6 +290,7 @@ collab_ui = { path = "crates/collab_ui" }
collections = { path = "crates/collections", version = "0.1.0" }
command_palette = { path = "crates/command_palette" }
command_palette_hooks = { path = "crates/command_palette_hooks" }
+compliance = { path = "tooling/compliance" }
component = { path = "crates/component" }
component_preview = { path = "crates/component_preview" }
context_server = { path = "crates/context_server" }
@@ -309,6 +313,7 @@ dev_container = { path = "crates/dev_container" }
diagnostics = { path = "crates/diagnostics" }
editor = { path = "crates/editor" }
encoding_selector = { path = "crates/encoding_selector" }
+env_var = { path = "crates/env_var" }
etw_tracing = { path = "crates/etw_tracing" }
eval_utils = { path = "crates/eval_utils" }
extension = { path = "crates/extension" }
@@ -465,6 +470,7 @@ worktree = { path = "crates/worktree" }
x_ai = { path = "crates/x_ai" }
zed = { path = "crates/zed" }
zed_actions = { path = "crates/zed_actions" }
+zed_credentials_provider = { path = "crates/zed_credentials_provider" }
zed_env_vars = { path = "crates/zed_env_vars" }
edit_prediction = { path = "crates/edit_prediction" }
zeta_prompt = { path = "crates/zeta_prompt" }
@@ -543,6 +549,7 @@ derive_more = { version = "2.1.1", features = [
"add_assign",
"deref",
"deref_mut",
+ "display",
"from_str",
"mul",
"mul_assign",
@@ -592,7 +599,7 @@ linkify = "0.10.0"
libwebrtc = "0.3.26"
livekit = { version = "0.7.32", features = ["tokio", "rustls-tls-native-roots"] }
log = { version = "0.4.16", features = ["kv_unstable_serde", "serde"] }
-lsp-types = { git = "https://github.com/zed-industries/lsp-types", rev = "a4f410987660bf560d1e617cb78117c6b6b9f599" }
+lsp-types = { git = "https://github.com/zed-industries/lsp-types", rev = "c7396459fefc7886b4adfa3b596832405ae1e880" }
mach2 = "0.5"
markup5ever_rcdom = "0.3.0"
metal = "0.33"
@@ -1,5 +1,4 @@
<svg width="16" height="16" viewBox="0 0 16 16" fill="none" xmlns="http://www.w3.org/2000/svg">
-<path d="M5.25 3H3.75C3.33579 3 3 3.33579 3 3.75V12.25C3 12.6642 3.33579 13 3.75 13H5.25C5.66421 13 6 12.6642 6 12.25V3.75C6 3.33579 5.66421 3 5.25 3Z" fill="#C6CAD0"/>
-<path opacity="0.7" d="M8.5 2.5C8.5 2.22386 8.27614 2 8 2C7.72386 2 7.5 2.22386 7.5 2.5V13.5C7.5 13.7761 7.72386 14 8 14C8.27614 14 8.5 13.7761 8.5 13.5V2.5Z" fill="#C6CAD0"/>
-<path d="M12.25 3H10.75C10.3358 3 10 3.33579 10 3.75V12.25C10 12.6642 10.3358 13 10.75 13H12.25C12.6642 13 13 12.6642 13 12.25V3.75C13 3.33579 12.6642 3 12.25 3Z" fill="#C6CAD0"/>
+<rect x="6.4" y="2.6" width="10.8" height="3.8" rx="0.9" transform="rotate(90 6.4 2.6)" fill="#C6CAD0" fill-opacity="0.5" stroke="#C6CAD0" stroke-width="1.2"/>
+<rect x="13.4" y="2.6" width="10.8" height="3.8" rx="0.9" transform="rotate(90 13.4 2.6)" fill="#C6CAD0" stroke="#C6CAD0" stroke-width="1.2"/>
</svg>
@@ -0,0 +1,7 @@
+<svg width="16" height="16" viewBox="0 0 16 16" fill="none" xmlns="http://www.w3.org/2000/svg">
+<rect opacity="0.5" x="14" y="2" width="12" height="5" rx="1.5" transform="rotate(90 14 2)" fill="#C6CAD0" fill-opacity="0.2"/>
+<rect opacity="0.5" x="7" y="2" width="12" height="5" rx="1.5" transform="rotate(90 7 2)" fill="#C6CAD0" fill-opacity="0.2"/>
+<path d="M10.2002 12.5C10.2002 12.6657 10.3343 12.7998 10.5 12.7998H12.3125L13.4951 13.4824C13.5254 13.4999 13.557 13.5142 13.5879 13.5293C13.3145 13.8182 12.9291 14 12.5 14H10.5L10.3467 13.9922C9.59028 13.9154 9 13.2767 9 12.5V10.8867L10.2002 11.5791V12.5ZM10.2002 6.95996L9 6.26758V3.5C9 2.72334 9.59028 2.08461 10.3467 2.00781L10.5 2H12.5C13.3284 2 14 2.67157 14 3.5V9.1543L12.7998 8.46094V3.5C12.7998 3.33431 12.6657 3.2002 12.5 3.2002H10.5C10.3343 3.2002 10.2002 3.33431 10.2002 3.5V6.95996Z" fill="#C6CAD0"/>
+<path d="M7 9.73242V12.5C7 13.3284 6.32843 14 5.5 14H3.5L3.34668 13.9922C2.59028 13.9154 2 13.2767 2 12.5V6.84473L3.2002 7.53809V12.5C3.2002 12.6657 3.33431 12.7998 3.5 12.7998H5.5C5.66569 12.7998 5.7998 12.6657 5.7998 12.5V9.03906L7 9.73242ZM5.5 2C6.32843 2 7 2.67157 7 3.5V5.1123L5.7998 4.41992V3.5C5.7998 3.33431 5.66569 3.2002 5.5 3.2002H3.6875L2.50488 2.51758C2.47399 2.49975 2.44173 2.48513 2.41016 2.46973C2.65063 2.21547 2.97893 2.04515 3.34668 2.00781L3.5 2H5.5Z" fill="#C6CAD0"/>
+<rect x="0.427673" y="4.78281" width="2" height="16.3329" rx="0.5" transform="rotate(-60 0.427673 4.78281)" fill="#C6CAD0"/>
+</svg>
@@ -1,4 +1,4 @@
<svg width="16" height="16" viewBox="0 0 16 16" fill="none" xmlns="http://www.w3.org/2000/svg">
-<path d="M12 3H4C3.44772 3 3 3.44772 3 4V6C3 6.55228 3.44772 7 4 7H12C12.5523 7 13 6.55228 13 6V4C13 3.44772 12.5523 3 12 3Z" fill="#C6CAD0"/>
-<path d="M12 9.5H4C3.72386 9.5 3.5 9.72386 3.5 10V12C3.5 12.2761 3.72386 12.5 4 12.5H12C12.2761 12.5 12.5 12.2761 12.5 12V10C12.5 9.72386 12.2761 9.5 12 9.5Z" fill="#C6CAD0" fill-opacity="0.12" stroke="#C6CAD0" stroke-width="1.2"/>
+<rect x="2.6" y="9.6" width="10.8" height="3.8" rx="0.9" fill="#C6CAD0" fill-opacity="0.5" stroke="#C6CAD0" stroke-width="1.2"/>
+<rect x="2" y="2" width="12" height="5" rx="1.5" fill="#C6CAD0"/>
</svg>
@@ -284,12 +284,36 @@
"context": "AcpThread",
"bindings": {
"ctrl--": "pane::GoBack",
+ "pageup": "agent::ScrollOutputPageUp",
+ "pagedown": "agent::ScrollOutputPageDown",
+ "home": "agent::ScrollOutputToTop",
+ "end": "agent::ScrollOutputToBottom",
+ "up": "agent::ScrollOutputLineUp",
+ "down": "agent::ScrollOutputLineDown",
+ "shift-pageup": "agent::ScrollOutputToPreviousMessage",
+ "shift-pagedown": "agent::ScrollOutputToNextMessage",
+ "ctrl-alt-pageup": "agent::ScrollOutputPageUp",
+ "ctrl-alt-pagedown": "agent::ScrollOutputPageDown",
+ "ctrl-alt-home": "agent::ScrollOutputToTop",
+ "ctrl-alt-end": "agent::ScrollOutputToBottom",
+ "ctrl-alt-up": "agent::ScrollOutputLineUp",
+ "ctrl-alt-down": "agent::ScrollOutputLineDown",
+ "ctrl-alt-shift-pageup": "agent::ScrollOutputToPreviousMessage",
+ "ctrl-alt-shift-pagedown": "agent::ScrollOutputToNextMessage",
},
},
{
"context": "AcpThread > Editor",
"use_key_equivalents": true,
"bindings": {
+ "ctrl-alt-pageup": "agent::ScrollOutputPageUp",
+ "ctrl-alt-pagedown": "agent::ScrollOutputPageDown",
+ "ctrl-alt-home": "agent::ScrollOutputToTop",
+ "ctrl-alt-end": "agent::ScrollOutputToBottom",
+ "ctrl-alt-up": "agent::ScrollOutputLineUp",
+ "ctrl-alt-down": "agent::ScrollOutputLineDown",
+ "ctrl-alt-shift-pageup": "agent::ScrollOutputToPreviousMessage",
+ "ctrl-alt-shift-pagedown": "agent::ScrollOutputToNextMessage",
"ctrl-shift-r": "agent::OpenAgentDiff",
"ctrl-shift-d": "git::Diff",
"shift-alt-y": "agent::KeepAll",
@@ -574,6 +598,7 @@
// Change the default action on `menu::Confirm` by setting the parameter
// "alt-ctrl-o": ["projects::OpenRecent", { "create_new_window": true }],
"alt-ctrl-o": ["projects::OpenRecent", { "create_new_window": false }],
+ "ctrl-r": ["projects::OpenRecent", { "create_new_window": false }],
"alt-shift-open": ["projects::OpenRemote", { "from_existing_connection": false, "create_new_window": false }],
// Change to open path modal for existing remote connection by setting the parameter
// "alt-ctrl-shift-o": "["projects::OpenRemote", { "from_existing_connection": true }]",
@@ -1123,6 +1148,8 @@
"bindings": {
"ctrl-k": "recent_projects::ToggleActionsMenu",
"ctrl-shift-a": "workspace::AddFolderToProject",
+ "shift-backspace": "recent_projects::RemoveSelected",
+ "ctrl-shift-enter": "recent_projects::AddToWorkspace",
},
},
{
@@ -1249,6 +1276,8 @@
"alt-down": "markdown::ScrollDownByItem",
"ctrl-home": "markdown::ScrollToTop",
"ctrl-end": "markdown::ScrollToBottom",
+ "find": "buffer_search::Deploy",
+ "ctrl-f": "buffer_search::Deploy",
},
},
{
@@ -327,12 +327,36 @@
"context": "AcpThread",
"bindings": {
"ctrl--": "pane::GoBack",
+ "pageup": "agent::ScrollOutputPageUp",
+ "pagedown": "agent::ScrollOutputPageDown",
+ "home": "agent::ScrollOutputToTop",
+ "end": "agent::ScrollOutputToBottom",
+ "up": "agent::ScrollOutputLineUp",
+ "down": "agent::ScrollOutputLineDown",
+ "shift-pageup": "agent::ScrollOutputToPreviousMessage",
+ "shift-pagedown": "agent::ScrollOutputToNextMessage",
+ "ctrl-pageup": "agent::ScrollOutputPageUp",
+ "ctrl-pagedown": "agent::ScrollOutputPageDown",
+ "ctrl-home": "agent::ScrollOutputToTop",
+ "ctrl-end": "agent::ScrollOutputToBottom",
+ "ctrl-alt-up": "agent::ScrollOutputLineUp",
+ "ctrl-alt-down": "agent::ScrollOutputLineDown",
+ "ctrl-alt-pageup": "agent::ScrollOutputToPreviousMessage",
+ "ctrl-alt-pagedown": "agent::ScrollOutputToNextMessage",
},
},
{
"context": "AcpThread > Editor",
"use_key_equivalents": true,
"bindings": {
+ "ctrl-pageup": "agent::ScrollOutputPageUp",
+ "ctrl-pagedown": "agent::ScrollOutputPageDown",
+ "ctrl-home": "agent::ScrollOutputToTop",
+ "ctrl-end": "agent::ScrollOutputToBottom",
+ "ctrl-alt-up": "agent::ScrollOutputLineUp",
+ "ctrl-alt-down": "agent::ScrollOutputLineDown",
+ "ctrl-alt-pageup": "agent::ScrollOutputToPreviousMessage",
+ "ctrl-alt-pagedown": "agent::ScrollOutputToNextMessage",
"shift-ctrl-r": "agent::OpenAgentDiff",
"shift-ctrl-d": "git::Diff",
"shift-alt-y": "agent::KeepAll",
@@ -644,6 +668,7 @@
// Change the default action on `menu::Confirm` by setting the parameter
// "alt-cmd-o": ["projects::OpenRecent", {"create_new_window": true }],
"alt-cmd-o": ["projects::OpenRecent", { "create_new_window": false }],
+ "ctrl-r": ["projects::OpenRecent", { "create_new_window": false }],
"ctrl-cmd-o": ["projects::OpenRemote", { "from_existing_connection": false, "create_new_window": false }],
"ctrl-cmd-shift-o": ["projects::OpenRemote", { "from_existing_connection": true, "create_new_window": false }],
"cmd-ctrl-b": "branches::OpenRecent",
@@ -1188,6 +1213,8 @@
"bindings": {
"cmd-k": "recent_projects::ToggleActionsMenu",
"cmd-shift-a": "workspace::AddFolderToProject",
+ "shift-backspace": "recent_projects::RemoveSelected",
+ "cmd-shift-enter": "recent_projects::AddToWorkspace",
},
},
{
@@ -1349,6 +1376,7 @@
"alt-down": "markdown::ScrollDownByItem",
"cmd-up": "markdown::ScrollToTop",
"cmd-down": "markdown::ScrollToBottom",
+ "cmd-f": "buffer_search::Deploy",
},
},
{
@@ -285,12 +285,36 @@
"context": "AcpThread",
"bindings": {
"ctrl--": "pane::GoBack",
+ "pageup": "agent::ScrollOutputPageUp",
+ "pagedown": "agent::ScrollOutputPageDown",
+ "home": "agent::ScrollOutputToTop",
+ "end": "agent::ScrollOutputToBottom",
+ "up": "agent::ScrollOutputLineUp",
+ "down": "agent::ScrollOutputLineDown",
+ "shift-pageup": "agent::ScrollOutputToPreviousMessage",
+ "shift-pagedown": "agent::ScrollOutputToNextMessage",
+ "ctrl-alt-pageup": "agent::ScrollOutputPageUp",
+ "ctrl-alt-pagedown": "agent::ScrollOutputPageDown",
+ "ctrl-alt-home": "agent::ScrollOutputToTop",
+ "ctrl-alt-end": "agent::ScrollOutputToBottom",
+ "ctrl-alt-up": "agent::ScrollOutputLineUp",
+ "ctrl-alt-down": "agent::ScrollOutputLineDown",
+ "ctrl-alt-shift-pageup": "agent::ScrollOutputToPreviousMessage",
+ "ctrl-alt-shift-pagedown": "agent::ScrollOutputToNextMessage",
},
},
{
"context": "AcpThread > Editor",
"use_key_equivalents": true,
"bindings": {
+ "ctrl-alt-pageup": "agent::ScrollOutputPageUp",
+ "ctrl-alt-pagedown": "agent::ScrollOutputPageDown",
+ "ctrl-alt-home": "agent::ScrollOutputToTop",
+ "ctrl-alt-end": "agent::ScrollOutputToBottom",
+ "ctrl-alt-up": "agent::ScrollOutputLineUp",
+ "ctrl-alt-down": "agent::ScrollOutputLineDown",
+ "ctrl-alt-shift-pageup": "agent::ScrollOutputToPreviousMessage",
+ "ctrl-alt-shift-pagedown": "agent::ScrollOutputToNextMessage",
"ctrl-shift-r": "agent::OpenAgentDiff",
"ctrl-shift-d": "git::Diff",
"shift-alt-y": "agent::KeepAll",
@@ -1134,6 +1158,8 @@
"bindings": {
"ctrl-k": "recent_projects::ToggleActionsMenu",
"ctrl-shift-a": "workspace::AddFolderToProject",
+ "shift-backspace": "recent_projects::RemoveSelected",
+ "ctrl-shift-enter": "recent_projects::AddToWorkspace",
},
},
{
@@ -1274,6 +1300,8 @@
"alt-down": "markdown::ScrollDownByItem",
"ctrl-home": "markdown::ScrollToTop",
"ctrl-end": "markdown::ScrollToBottom",
+ "find": "buffer_search::Deploy",
+ "ctrl-f": "buffer_search::Deploy",
},
},
{
@@ -1096,6 +1096,7 @@
"ctrl-e": "markdown::ScrollDown",
"g g": "markdown::ScrollToTop",
"shift-g": "markdown::ScrollToBottom",
+ "/": "buffer_search::Deploy",
},
},
{
@@ -225,6 +225,11 @@
// 3. Hide on both typing and cursor movement:
// "on_typing_and_movement"
"hide_mouse": "on_typing_and_movement",
+ // Determines whether the focused panel follows the mouse location.
+ "focus_follows_mouse": {
+ "enabled": false,
+ "debounce_ms": 250,
+ },
// Determines how snippets are sorted relative to other completion items.
//
// 1. Place snippets at the top of the completion list:
@@ -1102,11 +1107,14 @@
// "all_screens" - Show these notifications on all screens
// "never" - Never show these notifications
"notify_when_agent_waiting": "primary_screen",
- // Whether to play a sound when the agent has either completed
+ // When to play a sound when the agent has either completed
// its response, or needs user input.
-
- // Default: false
- "play_sound_when_agent_done": false,
+ // "never" - Never play the sound
+ // "when_hidden" - Only play the sound when the agent panel is not visible
+ // "always" - Always play the sound
+ //
+ // Default: never
+ "play_sound_when_agent_done": "never",
// Whether to have edit cards in the agent panel expanded, showing a preview of the full diff.
//
// Default: true
@@ -1136,6 +1144,11 @@
//
// Default: false
"show_turn_stats": false,
+ // Whether to show the merge conflict indicator in the status bar
+ // that offers to resolve conflicts using the agent.
+ //
+ // Default: true
+ "show_merge_conflict_indicator": true,
},
// Whether the screen sharing icon is shown in the os status bar.
"show_call_status_icon": true,
@@ -2404,6 +2417,7 @@
"toggle_relative_line_numbers": false,
"use_system_clipboard": "always",
"use_smartcase_find": false,
+ "use_regex_search": true,
"gdefault": false,
"highlight_on_yank_duration": 200,
"custom_digraphs": {},
@@ -2529,21 +2543,31 @@
"format_dap_log_messages": true,
"button": true,
},
- // Configures any number of settings profiles that are temporarily applied on
- // top of your existing user settings when selected from
- // `settings profile selector: toggle`.
+ // Configures any number of settings profiles that are temporarily applied
+ // when selected from `settings profile selector: toggle`.
+ //
+ // Each profile has an optional `base` ("user" or "default") and a `settings`
+ // object. When `base` is "user" (the default), the profile applies on top of
+ // your user settings. When `base` is "default", user settings are ignored and
+ // the profile applies on top of Zed's defaults.
+ //
// Examples:
// "profiles": {
// "Presenting": {
- // "agent_ui_font_size": 20.0,
- // "buffer_font_size": 20.0,
- // "theme": "One Light",
- // "ui_font_size": 20.0
+ // "base": "default",
+ // "settings": {
+ // "agent_ui_font_size": 20.0,
+ // "buffer_font_size": 20.0,
+ // "theme": "One Light",
+ // "ui_font_size": 20.0
+ // }
// },
// "Python (ty)": {
- // "languages": {
- // "Python": {
- // "language_servers": ["ty"]
+ // "settings": {
+ // "languages": {
+ // "Python": {
+ // "language_servers": ["ty"]
+ // }
// }
// }
// }
@@ -50,9 +50,9 @@
"show_command": true,
// Which edited buffers to save before running the task:
// * `all` — save all edited buffers
- // * `current` — save current buffer only
+ // * `current` — save currently active buffer only
// * `none` — don't save any buffers
- "save": "all",
+ "save": "none",
// Represents the tags for inline runnable indicators, or spawning multiple tasks at once.
// "tags": []
},
@@ -283,7 +283,7 @@
"font_weight": null
},
"preproc": {
- "color": "#bfbdb6ff",
+ "color": "#ff8f3fff",
"font_style": null,
"font_weight": null
},
@@ -391,6 +391,16 @@
"color": "#5ac1feff",
"font_style": null,
"font_weight": null
+ },
+ "diff.plus": {
+ "color": "#aad94cff",
+ "font_style": null,
+ "font_weight": null
+ },
+ "diff.minus": {
+ "color": "#f07178ff",
+ "font_style": null,
+ "font_weight": null
}
}
}
@@ -675,7 +685,7 @@
"font_weight": null
},
"preproc": {
- "color": "#5c6166ff",
+ "color": "#fa8d3eff",
"font_style": null,
"font_weight": null
},
@@ -783,6 +793,16 @@
"color": "#3b9ee5ff",
"font_style": null,
"font_weight": null
+ },
+ "diff.plus": {
+ "color": "#6cbf43ff",
+ "font_style": null,
+ "font_weight": null
+ },
+ "diff.minus": {
+ "color": "#ff6666ff",
+ "font_style": null,
+ "font_weight": null
}
}
}
@@ -1067,7 +1087,7 @@
"font_weight": null
},
"preproc": {
- "color": "#cccac2ff",
+ "color": "#ffad65ff",
"font_style": null,
"font_weight": null
},
@@ -1175,6 +1195,16 @@
"color": "#72cffeff",
"font_style": null,
"font_weight": null
+ },
+ "diff.plus": {
+ "color": "#aad94cff",
+ "font_style": null,
+ "font_weight": null
+ },
+ "diff.minus": {
+ "color": "#f07178ff",
+ "font_style": null,
+ "font_weight": null
}
}
}
@@ -293,7 +293,7 @@
"font_weight": null
},
"preproc": {
- "color": "#fbf1c7ff",
+ "color": "#fb4833ff",
"font_style": null,
"font_weight": null
},
@@ -406,6 +406,16 @@
"color": "#83a598ff",
"font_style": null,
"font_weight": null
+ },
+ "diff.plus": {
+ "color": "#b8bb26ff",
+ "font_style": null,
+ "font_weight": null
+ },
+ "diff.minus": {
+ "color": "#fb4934ff",
+ "font_style": null,
+ "font_weight": null
}
}
}
@@ -700,7 +710,7 @@
"font_weight": null
},
"preproc": {
- "color": "#fbf1c7ff",
+ "color": "#fb4833ff",
"font_style": null,
"font_weight": null
},
@@ -813,6 +823,16 @@
"color": "#83a598ff",
"font_style": null,
"font_weight": null
+ },
+ "diff.plus": {
+ "color": "#b8bb26ff",
+ "font_style": null,
+ "font_weight": null
+ },
+ "diff.minus": {
+ "color": "#fb4934ff",
+ "font_style": null,
+ "font_weight": null
}
}
}
@@ -1107,7 +1127,7 @@
"font_weight": null
},
"preproc": {
- "color": "#fbf1c7ff",
+ "color": "#fb4833ff",
"font_style": null,
"font_weight": null
},
@@ -1220,6 +1240,16 @@
"color": "#83a598ff",
"font_style": null,
"font_weight": null
+ },
+ "diff.plus": {
+ "color": "#b8bb26ff",
+ "font_style": null,
+ "font_weight": null
+ },
+ "diff.minus": {
+ "color": "#fb4934ff",
+ "font_style": null,
+ "font_weight": null
}
}
}
@@ -1514,7 +1544,7 @@
"font_weight": null
},
"preproc": {
- "color": "#282828ff",
+ "color": "#9d0006ff",
"font_style": null,
"font_weight": null
},
@@ -1627,6 +1657,16 @@
"color": "#0b6678ff",
"font_style": null,
"font_weight": null
+ },
+ "diff.plus": {
+ "color": "#79740eff",
+ "font_style": null,
+ "font_weight": null
+ },
+ "diff.minus": {
+ "color": "#9d0006ff",
+ "font_style": null,
+ "font_weight": null
}
}
}
@@ -1921,7 +1961,7 @@
"font_weight": null
},
"preproc": {
- "color": "#282828ff",
+ "color": "#9d0006ff",
"font_style": null,
"font_weight": null
},
@@ -2034,6 +2074,16 @@
"color": "#0b6678ff",
"font_style": null,
"font_weight": null
+ },
+ "diff.plus": {
+ "color": "#79740eff",
+ "font_style": null,
+ "font_weight": null
+ },
+ "diff.minus": {
+ "color": "#9d0006ff",
+ "font_style": null,
+ "font_weight": null
}
}
}
@@ -2328,7 +2378,7 @@
"font_weight": null
},
"preproc": {
- "color": "#282828ff",
+ "color": "#9d0006ff",
"font_style": null,
"font_weight": null
},
@@ -2441,6 +2491,16 @@
"color": "#0b6678ff",
"font_style": null,
"font_weight": null
+ },
+ "diff.plus": {
+ "color": "#79740eff",
+ "font_style": null,
+ "font_weight": null
+ },
+ "diff.minus": {
+ "color": "#9d0006ff",
+ "font_style": null,
+ "font_weight": null
}
}
}
@@ -290,7 +290,7 @@
"font_weight": null
},
"preproc": {
- "color": "#dce0e5ff",
+ "color": "#b477cfff",
"font_style": null,
"font_weight": null
},
@@ -403,6 +403,16 @@
"color": "#73ade9ff",
"font_style": null,
"font_weight": null
+ },
+ "diff.plus": {
+ "color": "#98c379ff",
+ "font_style": null,
+ "font_weight": null
+ },
+ "diff.minus": {
+ "color": "#e06c75ff",
+ "font_style": null,
+ "font_weight": null
}
}
}
@@ -692,7 +702,7 @@
"font_weight": null
},
"preproc": {
- "color": "#242529ff",
+ "color": "#a449abff",
"font_style": null,
"font_weight": null
},
@@ -805,6 +815,16 @@
"color": "#5b79e3ff",
"font_style": null,
"font_weight": null
+ },
+ "diff.plus": {
+ "color": "#50a14fff",
+ "font_style": null,
+ "font_weight": null
+ },
+ "diff.minus": {
+ "color": "#e45649ff",
+ "font_style": null,
+ "font_weight": null
}
}
}
@@ -1032,6 +1032,7 @@ pub struct AcpThread {
connection: Rc<dyn AgentConnection>,
token_usage: Option<TokenUsage>,
prompt_capabilities: acp::PromptCapabilities,
+ available_commands: Vec<acp::AvailableCommand>,
_observe_prompt_capabilities: Task<anyhow::Result<()>>,
terminals: HashMap<acp::TerminalId, Entity<Terminal>>,
pending_terminal_output: HashMap<acp::TerminalId, Vec<Vec<u8>>>,
@@ -1220,6 +1221,7 @@ impl AcpThread {
session_id,
token_usage: None,
prompt_capabilities,
+ available_commands: Vec::new(),
_observe_prompt_capabilities: task,
terminals: HashMap::default(),
pending_terminal_output: HashMap::default(),
@@ -1239,6 +1241,10 @@ impl AcpThread {
self.prompt_capabilities.clone()
}
+ pub fn available_commands(&self) -> &[acp::AvailableCommand] {
+ &self.available_commands
+ }
+
pub fn draft_prompt(&self) -> Option<&[acp::ContentBlock]> {
self.draft_prompt.as_deref()
}
@@ -1419,7 +1425,10 @@ impl AcpThread {
acp::SessionUpdate::AvailableCommandsUpdate(acp::AvailableCommandsUpdate {
available_commands,
..
- }) => cx.emit(AcpThreadEvent::AvailableCommandsUpdated(available_commands)),
+ }) => {
+ self.available_commands = available_commands.clone();
+ cx.emit(AcpThreadEvent::AvailableCommandsUpdated(available_commands));
+ }
acp::SessionUpdate::CurrentModeUpdate(acp::CurrentModeUpdate {
current_mode_id,
..
@@ -2616,7 +2625,7 @@ impl AcpThread {
text_diff(old_text.as_str(), &content)
.into_iter()
.map(|(range, replacement)| {
- (snapshot.anchor_range_around(range), replacement)
+ (snapshot.anchor_range_inside(range), replacement)
})
.collect::<Vec<_>>()
})
@@ -191,7 +191,7 @@ impl Diff {
}
pub fn has_revealed_range(&self, cx: &App) -> bool {
- self.multibuffer().read(cx).paths().next().is_some()
+ !self.multibuffer().read(cx).is_empty()
}
pub fn needs_update(&self, old_text: &str, new_text: &str, cx: &App) -> bool {
@@ -19,7 +19,9 @@ pub enum MentionUri {
File {
abs_path: PathBuf,
},
- PastedImage,
+ PastedImage {
+ name: String,
+ },
Directory {
abs_path: PathBuf,
},
@@ -155,7 +157,9 @@ impl MentionUri {
include_warnings,
})
} else if path.starts_with("/agent/pasted-image") {
- Ok(Self::PastedImage)
+ let name =
+ single_query_param(&url, "name")?.unwrap_or_else(|| "Image".to_string());
+ Ok(Self::PastedImage { name })
} else if path.starts_with("/agent/untitled-buffer") {
let fragment = url
.fragment()
@@ -227,7 +231,7 @@ impl MentionUri {
.unwrap_or_default()
.to_string_lossy()
.into_owned(),
- MentionUri::PastedImage => "Image".to_string(),
+ MentionUri::PastedImage { name } => name.clone(),
MentionUri::Symbol { name, .. } => name.clone(),
MentionUri::Thread { name, .. } => name.clone(),
MentionUri::Rule { name, .. } => name.clone(),
@@ -296,7 +300,7 @@ impl MentionUri {
MentionUri::File { abs_path } => {
FileIcons::get_icon(abs_path, cx).unwrap_or_else(|| IconName::File.path().into())
}
- MentionUri::PastedImage => IconName::Image.path().into(),
+ MentionUri::PastedImage { .. } => IconName::Image.path().into(),
MentionUri::Directory { abs_path } => FileIcons::get_folder_icon(false, abs_path, cx)
.unwrap_or_else(|| IconName::Folder.path().into()),
MentionUri::Symbol { .. } => IconName::Code.path().into(),
@@ -322,10 +326,18 @@ impl MentionUri {
url.set_path(&abs_path.to_string_lossy());
url
}
- MentionUri::PastedImage => Url::parse("zed:///agent/pasted-image").unwrap(),
+ MentionUri::PastedImage { name } => {
+ let mut url = Url::parse("zed:///agent/pasted-image").unwrap();
+ url.query_pairs_mut().append_pair("name", name);
+ url
+ }
MentionUri::Directory { abs_path } => {
let mut url = Url::parse("file:///").unwrap();
- url.set_path(&abs_path.to_string_lossy());
+ let mut path = abs_path.to_string_lossy().into_owned();
+ if !path.ends_with('/') && !path.ends_with('\\') {
+ path.push('/');
+ }
+ url.set_path(&path);
url
}
MentionUri::Symbol {
@@ -490,6 +502,21 @@ mod tests {
assert_eq!(uri.to_uri().to_string(), expected);
}
+ #[test]
+ fn test_directory_uri_round_trip_without_trailing_slash() {
+ let uri = MentionUri::Directory {
+ abs_path: PathBuf::from(path!("/path/to/dir")),
+ };
+ let serialized = uri.to_uri().to_string();
+ assert!(serialized.ends_with('/'), "directory URI must end with /");
+ let parsed = MentionUri::parse(&serialized, PathStyle::local()).unwrap();
+ assert!(
+ matches!(parsed, MentionUri::Directory { .. }),
+ "expected Directory variant, got {:?}",
+ parsed
+ );
+ }
+
#[test]
fn test_parse_symbol_uri() {
let symbol_uri = uri!("file:///path/to/file.rs?symbol=MySymbol#L10:20");
@@ -738,6 +738,7 @@ impl ActionLog {
let task = if let Some(existing_file_content) = existing_file_content {
// Capture the agent's content before restoring existing file content
let agent_content = buffer.read(cx).text();
+ let buffer_id = buffer.read(cx).remote_id();
buffer.update(cx, |buffer, cx| {
buffer.start_transaction();
@@ -750,7 +751,10 @@ impl ActionLog {
undo_info = Some(PerBufferUndo {
buffer: buffer.downgrade(),
- edits_to_restore: vec![(Anchor::MIN..Anchor::MAX, agent_content)],
+ edits_to_restore: vec![(
+ Anchor::min_for_buffer(buffer_id)..Anchor::max_for_buffer(buffer_id),
+ agent_content,
+ )],
status: UndoBufferStatus::Created {
had_existing_content: true,
},
@@ -990,8 +994,8 @@ impl ActionLog {
let mut valid_edits = Vec::new();
for (anchor_range, text_to_restore) in per_buffer_undo.edits_to_restore {
- if anchor_range.start.buffer_id == Some(buffer.remote_id())
- && anchor_range.end.buffer_id == Some(buffer.remote_id())
+ if anchor_range.start.buffer_id == buffer.remote_id()
+ && anchor_range.end.buffer_id == buffer.remote_id()
{
valid_edits.push((anchor_range, text_to_restore));
}
@@ -374,13 +374,13 @@ impl EditAgent {
buffer.edit(edits.iter().cloned(), None, cx);
let max_edit_end = buffer
.summaries_for_anchors::<Point, _>(
- edits.iter().map(|(range, _)| &range.end),
+ edits.iter().map(|(range, _)| range.end),
)
.max()
.unwrap();
let min_edit_start = buffer
.summaries_for_anchors::<Point, _>(
- edits.iter().map(|(range, _)| &range.start),
+ edits.iter().map(|(range, _)| range.start),
)
.min()
.unwrap();
@@ -1519,7 +1519,7 @@ mod tests {
stream: &mut UnboundedReceiver<EditAgentOutputEvent>,
) -> Vec<EditAgentOutputEvent> {
let mut events = Vec::new();
- while let Ok(Some(event)) = stream.try_next() {
+ while let Ok(event) = stream.try_recv() {
events.push(event);
}
events
@@ -4,7 +4,7 @@ use crate::{
ListDirectoryTool, ListDirectoryToolInput, ReadFileTool, ReadFileToolInput,
};
use Role::*;
-use client::{Client, UserStore};
+use client::{Client, RefreshLlmTokenListener, UserStore};
use eval_utils::{EvalOutput, EvalOutputProcessor, OutcomeKind};
use fs::FakeFs;
use futures::{FutureExt, future::LocalBoxFuture};
@@ -1423,7 +1423,8 @@ impl EditAgentTest {
let client = Client::production(cx);
let user_store = cx.new(|cx| UserStore::new(client.clone(), cx));
settings::init(cx);
- language_model::init(user_store.clone(), client.clone(), cx);
+ language_model::init(cx);
+ RefreshLlmTokenListener::register(client.clone(), user_store.clone(), cx);
language_models::init(user_store, client.clone(), cx);
});
@@ -202,3 +202,214 @@ async fn test_edit_file_tool_in_thread_context(cx: &mut TestAppContext) {
);
});
}
+
+#[gpui::test]
+async fn test_streaming_edit_json_parse_error_does_not_cause_unsaved_changes(
+ cx: &mut TestAppContext,
+) {
+ super::init_test(cx);
+ super::always_allow_tools(cx);
+
+ // Enable the streaming edit file tool feature flag.
+ cx.update(|cx| {
+ cx.update_flags(true, vec!["streaming-edit-file-tool".to_string()]);
+ });
+
+ let fs = FakeFs::new(cx.executor());
+ fs.insert_tree(
+ path!("/project"),
+ json!({
+ "src": {
+ "main.rs": "fn main() {\n println!(\"Hello, world!\");\n}\n"
+ }
+ }),
+ )
+ .await;
+
+ let project = project::Project::test(fs.clone(), [path!("/project").as_ref()], cx).await;
+ let project_context = cx.new(|_cx| ProjectContext::default());
+ let context_server_store = project.read_with(cx, |project, _| project.context_server_store());
+ let context_server_registry =
+ cx.new(|cx| crate::ContextServerRegistry::new(context_server_store.clone(), cx));
+ let model = Arc::new(FakeLanguageModel::default());
+ model.as_fake().set_supports_streaming_tools(true);
+ let fake_model = model.as_fake();
+
+ let thread = cx.new(|cx| {
+ let mut thread = crate::Thread::new(
+ project.clone(),
+ project_context,
+ context_server_registry,
+ crate::Templates::new(),
+ Some(model.clone()),
+ cx,
+ );
+ let language_registry = project.read(cx).languages().clone();
+ thread.add_tool(crate::StreamingEditFileTool::new(
+ project.clone(),
+ cx.weak_entity(),
+ thread.action_log().clone(),
+ language_registry,
+ ));
+ thread
+ });
+
+ let _events = thread
+ .update(cx, |thread, cx| {
+ thread.send(
+ UserMessageId::new(),
+ ["Write new content to src/main.rs"],
+ cx,
+ )
+ })
+ .unwrap();
+ cx.run_until_parked();
+
+ let tool_use_id = "edit_1";
+ let partial_1 = LanguageModelToolUse {
+ id: tool_use_id.into(),
+ name: EditFileTool::NAME.into(),
+ raw_input: json!({
+ "display_description": "Rewrite main.rs",
+ "path": "project/src/main.rs",
+ "mode": "write"
+ })
+ .to_string(),
+ input: json!({
+ "display_description": "Rewrite main.rs",
+ "path": "project/src/main.rs",
+ "mode": "write"
+ }),
+ is_input_complete: false,
+ thought_signature: None,
+ };
+ fake_model.send_last_completion_stream_event(LanguageModelCompletionEvent::ToolUse(partial_1));
+ cx.run_until_parked();
+
+ let partial_2 = LanguageModelToolUse {
+ id: tool_use_id.into(),
+ name: EditFileTool::NAME.into(),
+ raw_input: json!({
+ "display_description": "Rewrite main.rs",
+ "path": "project/src/main.rs",
+ "mode": "write",
+ "content": "fn main() { /* rewritten */ }"
+ })
+ .to_string(),
+ input: json!({
+ "display_description": "Rewrite main.rs",
+ "path": "project/src/main.rs",
+ "mode": "write",
+ "content": "fn main() { /* rewritten */ }"
+ }),
+ is_input_complete: false,
+ thought_signature: None,
+ };
+ fake_model.send_last_completion_stream_event(LanguageModelCompletionEvent::ToolUse(partial_2));
+ cx.run_until_parked();
+
+ // Now send a json parse error. At this point we have started writing content to the buffer.
+ fake_model.send_last_completion_stream_event(
+ LanguageModelCompletionEvent::ToolUseJsonParseError {
+ id: tool_use_id.into(),
+ tool_name: EditFileTool::NAME.into(),
+ raw_input: r#"{"display_description":"Rewrite main.rs","path":"project/src/main.rs","mode":"write","content":"fn main() { /* rewritten "#.into(),
+ json_parse_error: "EOF while parsing a string at line 1 column 95".into(),
+ },
+ );
+ fake_model
+ .send_last_completion_stream_event(LanguageModelCompletionEvent::Stop(StopReason::ToolUse));
+ fake_model.end_last_completion_stream();
+ cx.run_until_parked();
+
+ // cx.executor().advance_clock(Duration::from_secs(5));
+ // cx.run_until_parked();
+
+ assert!(
+ !fake_model.pending_completions().is_empty(),
+ "Thread should have retried after the error"
+ );
+
+ // Respond with a new, well-formed, complete edit_file tool use.
+ let tool_use = LanguageModelToolUse {
+ id: "edit_2".into(),
+ name: EditFileTool::NAME.into(),
+ raw_input: json!({
+ "display_description": "Rewrite main.rs",
+ "path": "project/src/main.rs",
+ "mode": "write",
+ "content": "fn main() {\n println!(\"Hello, rewritten!\");\n}\n"
+ })
+ .to_string(),
+ input: json!({
+ "display_description": "Rewrite main.rs",
+ "path": "project/src/main.rs",
+ "mode": "write",
+ "content": "fn main() {\n println!(\"Hello, rewritten!\");\n}\n"
+ }),
+ is_input_complete: true,
+ thought_signature: None,
+ };
+ fake_model.send_last_completion_stream_event(LanguageModelCompletionEvent::ToolUse(tool_use));
+ fake_model
+ .send_last_completion_stream_event(LanguageModelCompletionEvent::Stop(StopReason::ToolUse));
+ fake_model.end_last_completion_stream();
+ cx.run_until_parked();
+
+ let pending_completions = fake_model.pending_completions();
+ assert!(
+ pending_completions.len() == 1,
+ "Expected only the follow-up completion containing the successful tool result"
+ );
+
+ let completion = pending_completions
+ .into_iter()
+ .last()
+ .expect("Expected a completion containing the tool result for edit_2");
+
+ let tool_result = completion
+ .messages
+ .iter()
+ .flat_map(|msg| &msg.content)
+ .find_map(|content| match content {
+ language_model::MessageContent::ToolResult(result)
+ if result.tool_use_id == language_model::LanguageModelToolUseId::from("edit_2") =>
+ {
+ Some(result)
+ }
+ _ => None,
+ })
+ .expect("Should have a tool result for edit_2");
+
+ // Ensure that the second tool call completed successfully and edits were applied.
+ assert!(
+ !tool_result.is_error,
+ "Tool result should succeed, got: {:?}",
+ tool_result
+ );
+ let content_text = match &tool_result.content {
+ language_model::LanguageModelToolResultContent::Text(t) => t.to_string(),
+ other => panic!("Expected text content, got: {:?}", other),
+ };
+ assert!(
+ !content_text.contains("file has been modified since you last read it"),
+ "Did not expect a stale last-read error, got: {content_text}"
+ );
+ assert!(
+ !content_text.contains("This file has unsaved changes"),
+ "Did not expect an unsaved-changes error, got: {content_text}"
+ );
+
+ let file_content = fs
+ .load(path!("/project/src/main.rs").as_ref())
+ .await
+ .expect("file should exist");
+ super::assert_eq!(
+ file_content,
+ "fn main() {\n println!(\"Hello, rewritten!\");\n}\n",
+ "The second edit should be applied and saved gracefully"
+ );
+
+ fake_model.end_last_completion_stream();
+ cx.run_until_parked();
+}
@@ -6,7 +6,7 @@ use acp_thread::{
use agent_client_protocol::{self as acp};
use agent_settings::AgentProfileId;
use anyhow::Result;
-use client::{Client, UserStore};
+use client::{Client, RefreshLlmTokenListener, UserStore};
use collections::IndexMap;
use context_server::{ContextServer, ContextServerCommand, ContextServerId};
use feature_flags::FeatureFlagAppExt as _;
@@ -3253,7 +3253,8 @@ async fn test_agent_connection(cx: &mut TestAppContext) {
let clock = Arc::new(clock::FakeSystemClock::new());
let client = Client::new(clock, http_client, cx);
let user_store = cx.new(|cx| UserStore::new(client.clone(), cx));
- language_model::init(user_store.clone(), client.clone(), cx);
+ language_model::init(cx);
+ RefreshLlmTokenListener::register(client.clone(), user_store.clone(), cx);
language_models::init(user_store, client.clone(), cx);
LanguageModelRegistry::test(cx);
});
@@ -3902,6 +3903,117 @@ async fn test_streaming_tool_completes_when_llm_stream_ends_without_final_input(
});
}
+#[gpui::test]
+async fn test_streaming_tool_json_parse_error_is_forwarded_to_running_tool(
+ cx: &mut TestAppContext,
+) {
+ init_test(cx);
+ always_allow_tools(cx);
+
+ let ThreadTest { model, thread, .. } = setup(cx, TestModel::Fake).await;
+ let fake_model = model.as_fake();
+
+ thread.update(cx, |thread, _cx| {
+ thread.add_tool(StreamingJsonErrorContextTool);
+ });
+
+ let _events = thread
+ .update(cx, |thread, cx| {
+ thread.send(
+ UserMessageId::new(),
+ ["Use the streaming_json_error_context tool"],
+ cx,
+ )
+ })
+ .unwrap();
+ cx.run_until_parked();
+
+ let tool_use = LanguageModelToolUse {
+ id: "tool_1".into(),
+ name: StreamingJsonErrorContextTool::NAME.into(),
+ raw_input: r#"{"text": "partial"#.into(),
+ input: json!({"text": "partial"}),
+ is_input_complete: false,
+ thought_signature: None,
+ };
+ fake_model.send_last_completion_stream_event(LanguageModelCompletionEvent::ToolUse(tool_use));
+ cx.run_until_parked();
+
+ fake_model.send_last_completion_stream_event(
+ LanguageModelCompletionEvent::ToolUseJsonParseError {
+ id: "tool_1".into(),
+ tool_name: StreamingJsonErrorContextTool::NAME.into(),
+ raw_input: r#"{"text": "partial"#.into(),
+ json_parse_error: "EOF while parsing a string at line 1 column 17".into(),
+ },
+ );
+ fake_model
+ .send_last_completion_stream_event(LanguageModelCompletionEvent::Stop(StopReason::ToolUse));
+ fake_model.end_last_completion_stream();
+ cx.run_until_parked();
+
+ cx.executor().advance_clock(Duration::from_secs(5));
+ cx.run_until_parked();
+
+ let completion = fake_model
+ .pending_completions()
+ .pop()
+ .expect("No running turn");
+
+ let tool_results: Vec<_> = completion
+ .messages
+ .iter()
+ .flat_map(|message| &message.content)
+ .filter_map(|content| match content {
+ MessageContent::ToolResult(result)
+ if result.tool_use_id == language_model::LanguageModelToolUseId::from("tool_1") =>
+ {
+ Some(result)
+ }
+ _ => None,
+ })
+ .collect();
+
+ assert_eq!(
+ tool_results.len(),
+ 1,
+ "Expected exactly 1 tool result for tool_1, got {}: {:#?}",
+ tool_results.len(),
+ tool_results
+ );
+
+ let result = tool_results[0];
+ assert!(result.is_error);
+ let content_text = match &result.content {
+ language_model::LanguageModelToolResultContent::Text(text) => text.to_string(),
+ other => panic!("Expected text content, got {:?}", other),
+ };
+ assert!(
+ content_text.contains("Saw partial text 'partial' before invalid JSON"),
+ "Expected tool-enriched partial context, got: {content_text}"
+ );
+ assert!(
+ content_text
+ .contains("Error parsing input JSON: EOF while parsing a string at line 1 column 17"),
+ "Expected forwarded JSON parse error, got: {content_text}"
+ );
+ assert!(
+ !content_text.contains("tool input was not fully received"),
+ "Should not contain orphaned sender error, got: {content_text}"
+ );
+
+ fake_model.send_last_completion_stream_text_chunk("Done");
+ fake_model.end_last_completion_stream();
+ cx.run_until_parked();
+
+ thread.read_with(cx, |thread, _cx| {
+ assert!(
+ thread.is_turn_complete(),
+ "Thread should not be stuck; the turn should have completed",
+ );
+ });
+}
+
/// Filters out the stop events for asserting against in tests
fn stop_events(result_events: Vec<Result<ThreadEvent>>) -> Vec<acp::StopReason> {
result_events
@@ -3958,6 +4070,7 @@ async fn setup(cx: &mut TestAppContext, model: TestModel) -> ThreadTest {
InfiniteTool::NAME: true,
CancellationAwareTool::NAME: true,
StreamingEchoTool::NAME: true,
+ StreamingJsonErrorContextTool::NAME: true,
StreamingFailingEchoTool::NAME: true,
TerminalTool::NAME: true,
UpdatePlanTool::NAME: true,
@@ -3982,7 +4095,8 @@ async fn setup(cx: &mut TestAppContext, model: TestModel) -> ThreadTest {
cx.set_http_client(Arc::new(http_client));
let client = Client::production(cx);
let user_store = cx.new(|cx| UserStore::new(client.clone(), cx));
- language_model::init(user_store.clone(), client.clone(), cx);
+ language_model::init(cx);
+ RefreshLlmTokenListener::register(client.clone(), user_store.clone(), cx);
language_models::init(user_store, client.clone(), cx);
}
};
@@ -6206,9 +6320,9 @@ async fn test_edit_file_tool_allow_rule_skips_confirmation(cx: &mut TestAppConte
cx.run_until_parked();
- let event = rx.try_next();
+ let event = rx.try_recv();
assert!(
- !matches!(event, Ok(Some(Ok(ThreadEvent::ToolCallAuthorization(_))))),
+ !matches!(event, Ok(Ok(ThreadEvent::ToolCallAuthorization(_)))),
"expected no authorization request for allowed .md file"
);
}
@@ -6350,9 +6464,9 @@ async fn test_fetch_tool_allow_rule_skips_confirmation(cx: &mut TestAppContext)
cx.run_until_parked();
- let event = rx.try_next();
+ let event = rx.try_recv();
assert!(
- !matches!(event, Ok(Some(Ok(ThreadEvent::ToolCallAuthorization(_))))),
+ !matches!(event, Ok(Ok(ThreadEvent::ToolCallAuthorization(_)))),
"expected no authorization request for allowed docs.rs URL"
);
}
@@ -56,13 +56,12 @@ impl AgentTool for StreamingEchoTool {
fn run(
self: Arc<Self>,
- mut input: ToolInput<Self::Input>,
+ input: ToolInput<Self::Input>,
_event_stream: ToolCallEventStream,
cx: &mut App,
) -> Task<Result<String, String>> {
let wait_until_complete_rx = self.wait_until_complete_rx.lock().unwrap().take();
cx.spawn(async move |_cx| {
- while input.recv_partial().await.is_some() {}
let input = input
.recv()
.await
@@ -75,6 +74,68 @@ impl AgentTool for StreamingEchoTool {
}
}
+#[derive(JsonSchema, Serialize, Deserialize)]
+pub struct StreamingJsonErrorContextToolInput {
+ /// The text to echo.
+ pub text: String,
+}
+
+pub struct StreamingJsonErrorContextTool;
+
+impl AgentTool for StreamingJsonErrorContextTool {
+ type Input = StreamingJsonErrorContextToolInput;
+ type Output = String;
+
+ const NAME: &'static str = "streaming_json_error_context";
+
+ fn supports_input_streaming() -> bool {
+ true
+ }
+
+ fn kind() -> acp::ToolKind {
+ acp::ToolKind::Other
+ }
+
+ fn initial_title(
+ &self,
+ _input: Result<Self::Input, serde_json::Value>,
+ _cx: &mut App,
+ ) -> SharedString {
+ "Streaming JSON Error Context".into()
+ }
+
+ fn run(
+ self: Arc<Self>,
+ mut input: ToolInput<Self::Input>,
+ _event_stream: ToolCallEventStream,
+ cx: &mut App,
+ ) -> Task<Result<String, String>> {
+ cx.spawn(async move |_cx| {
+ let mut last_partial_text = None;
+
+ loop {
+ match input.next().await {
+ Ok(ToolInputPayload::Partial(partial)) => {
+ if let Some(text) = partial.get("text").and_then(|value| value.as_str()) {
+ last_partial_text = Some(text.to_string());
+ }
+ }
+ Ok(ToolInputPayload::Full(input)) => return Ok(input.text),
+ Ok(ToolInputPayload::InvalidJson { error_message }) => {
+ let partial_text = last_partial_text.unwrap_or_default();
+ return Err(format!(
+ "Saw partial text '{partial_text}' before invalid JSON: {error_message}"
+ ));
+ }
+ Err(error) => {
+ return Err(format!("Failed to receive tool input: {error}"));
+ }
+ }
+ }
+ })
+ }
+}
+
/// A streaming tool that echoes its input, used to test streaming tool
/// lifecycle (e.g. partial delivery and cleanup when the LLM stream ends
/// before `is_input_complete`).
@@ -119,7 +180,7 @@ impl AgentTool for StreamingFailingEchoTool {
) -> Task<Result<Self::Output, Self::Output>> {
cx.spawn(async move |_cx| {
for _ in 0..self.receive_chunks_until_failure {
- let _ = input.recv_partial().await;
+ let _ = input.next().await;
}
Err("failed".into())
})
@@ -22,13 +22,13 @@ use client::UserStore;
use cloud_api_types::Plan;
use collections::{HashMap, HashSet, IndexMap};
use fs::Fs;
-use futures::stream;
use futures::{
FutureExt,
channel::{mpsc, oneshot},
future::Shared,
stream::FuturesUnordered,
};
+use futures::{StreamExt, stream};
use gpui::{
App, AppContext, AsyncApp, Context, Entity, EventEmitter, SharedString, Task, WeakEntity,
};
@@ -47,7 +47,6 @@ use schemars::{JsonSchema, Schema};
use serde::de::DeserializeOwned;
use serde::{Deserialize, Serialize};
use settings::{LanguageModelSelection, Settings, ToolPermissionMode, update_settings_file};
-use smol::stream::StreamExt;
use std::{
collections::BTreeMap,
marker::PhantomData,
@@ -253,7 +252,7 @@ impl UserMessage {
)
.ok();
}
- MentionUri::PastedImage => {
+ MentionUri::PastedImage { .. } => {
debug_panic!("pasted image URI should not be used in mention content")
}
MentionUri::Directory { .. } => {
@@ -2095,7 +2094,7 @@ impl Thread {
this.update(cx, |this, _cx| {
this.pending_message()
.tool_results
- .insert(tool_result.tool_use_id.clone(), tool_result);
+ .insert(tool_result.tool_use_id.clone(), tool_result)
})?;
Ok(())
}
@@ -2195,15 +2194,15 @@ impl Thread {
raw_input,
json_parse_error,
} => {
- return Ok(Some(Task::ready(
- self.handle_tool_use_json_parse_error_event(
- id,
- tool_name,
- raw_input,
- json_parse_error,
- event_stream,
- ),
- )));
+ return Ok(self.handle_tool_use_json_parse_error_event(
+ id,
+ tool_name,
+ raw_input,
+ json_parse_error,
+ event_stream,
+ cancellation_rx,
+ cx,
+ ));
}
UsageUpdate(usage) => {
telemetry::event!(
@@ -2304,12 +2303,12 @@ impl Thread {
if !tool_use.is_input_complete {
if tool.supports_input_streaming() {
let running_turn = self.running_turn.as_mut()?;
- if let Some(sender) = running_turn.streaming_tool_inputs.get(&tool_use.id) {
+ if let Some(sender) = running_turn.streaming_tool_inputs.get_mut(&tool_use.id) {
sender.send_partial(tool_use.input);
return None;
}
- let (sender, tool_input) = ToolInputSender::channel();
+ let (mut sender, tool_input) = ToolInputSender::channel();
sender.send_partial(tool_use.input);
running_turn
.streaming_tool_inputs
@@ -2331,13 +2330,13 @@ impl Thread {
}
}
- if let Some(sender) = self
+ if let Some(mut sender) = self
.running_turn
.as_mut()?
.streaming_tool_inputs
.remove(&tool_use.id)
{
- sender.send_final(tool_use.input);
+ sender.send_full(tool_use.input);
return None;
}
@@ -2410,10 +2409,12 @@ impl Thread {
raw_input: Arc<str>,
json_parse_error: String,
event_stream: &ThreadEventStream,
- ) -> LanguageModelToolResult {
+ cancellation_rx: watch::Receiver<bool>,
+ cx: &mut Context<Self>,
+ ) -> Option<Task<LanguageModelToolResult>> {
let tool_use = LanguageModelToolUse {
- id: tool_use_id.clone(),
- name: tool_name.clone(),
+ id: tool_use_id,
+ name: tool_name,
raw_input: raw_input.to_string(),
input: serde_json::json!({}),
is_input_complete: true,
@@ -2426,14 +2427,43 @@ impl Thread {
event_stream,
);
- let tool_output = format!("Error parsing input JSON: {json_parse_error}");
- LanguageModelToolResult {
- tool_use_id,
- tool_name,
- is_error: true,
- content: LanguageModelToolResultContent::Text(tool_output.into()),
- output: Some(serde_json::Value::String(raw_input.to_string())),
+ let tool = self.tool(tool_use.name.as_ref());
+
+ let Some(tool) = tool else {
+ let content = format!("No tool named {} exists", tool_use.name);
+ return Some(Task::ready(LanguageModelToolResult {
+ content: LanguageModelToolResultContent::Text(Arc::from(content)),
+ tool_use_id: tool_use.id,
+ tool_name: tool_use.name,
+ is_error: true,
+ output: None,
+ }));
+ };
+
+ let error_message = format!("Error parsing input JSON: {json_parse_error}");
+
+ if tool.supports_input_streaming()
+ && let Some(mut sender) = self
+ .running_turn
+ .as_mut()?
+ .streaming_tool_inputs
+ .remove(&tool_use.id)
+ {
+ sender.send_invalid_json(error_message);
+ return None;
}
+
+ log::debug!("Running tool {}. Received invalid JSON", tool_use.name);
+ let tool_input = ToolInput::invalid_json(error_message);
+ Some(self.run_tool(
+ tool,
+ tool_input,
+ tool_use.id,
+ tool_use.name,
+ event_stream,
+ cancellation_rx,
+ cx,
+ ))
}
fn send_or_update_tool_use(
@@ -3114,8 +3144,7 @@ impl EventEmitter<TitleUpdated> for Thread {}
/// For streaming tools, payloads arrive via `.next()` as the LLM streams them:
/// partial JSON snapshots, then the final complete input (or an invalid-JSON error).
pub struct ToolInput<T> {
- partial_rx: mpsc::UnboundedReceiver<serde_json::Value>,
- final_rx: oneshot::Receiver<serde_json::Value>,
+ rx: mpsc::UnboundedReceiver<ToolInputPayload<serde_json::Value>>,
_phantom: PhantomData<T>,
}
@@ -3127,13 +3156,20 @@ impl<T: DeserializeOwned> ToolInput<T> {
}
pub fn ready(value: serde_json::Value) -> Self {
- let (partial_tx, partial_rx) = mpsc::unbounded();
- drop(partial_tx);
- let (final_tx, final_rx) = oneshot::channel();
- final_tx.send(value).ok();
+ let (tx, rx) = mpsc::unbounded();
+ tx.unbounded_send(ToolInputPayload::Full(value)).ok();
Self {
- partial_rx,
- final_rx,
+ rx,
+ _phantom: PhantomData,
+ }
+ }
+
+ pub fn invalid_json(error_message: String) -> Self {
+ let (tx, rx) = mpsc::unbounded();
+ tx.unbounded_send(ToolInputPayload::InvalidJson { error_message })
+ .ok();
+ Self {
+ rx,
_phantom: PhantomData,
}
}
@@ -3147,65 +3183,89 @@ impl<T: DeserializeOwned> ToolInput<T> {
/// Wait for the final deserialized input, ignoring all partial updates.
/// Non-streaming tools can use this to wait until the whole input is available.
pub async fn recv(mut self) -> Result<T> {
- // Drain any remaining partials
- while self.partial_rx.next().await.is_some() {}
+ while let Ok(value) = self.next().await {
+ match value {
+ ToolInputPayload::Full(value) => return Ok(value),
+ ToolInputPayload::Partial(_) => {}
+ ToolInputPayload::InvalidJson { error_message } => {
+ return Err(anyhow!(error_message));
+ }
+ }
+ }
+ Err(anyhow!("tool input was not fully received"))
+ }
+
+ pub async fn next(&mut self) -> Result<ToolInputPayload<T>> {
let value = self
- .final_rx
+ .rx
+ .next()
.await
- .map_err(|_| anyhow!("tool input was not fully received"))?;
- serde_json::from_value(value).map_err(Into::into)
- }
+ .ok_or_else(|| anyhow!("tool input was not fully received"))?;
- /// Returns the next partial JSON snapshot, or `None` when input is complete.
- /// Once this returns `None`, call `recv()` to get the final input.
- pub async fn recv_partial(&mut self) -> Option<serde_json::Value> {
- self.partial_rx.next().await
+ Ok(match value {
+ ToolInputPayload::Partial(payload) => ToolInputPayload::Partial(payload),
+ ToolInputPayload::Full(payload) => {
+ ToolInputPayload::Full(serde_json::from_value(payload)?)
+ }
+ ToolInputPayload::InvalidJson { error_message } => {
+ ToolInputPayload::InvalidJson { error_message }
+ }
+ })
}
fn cast<U: DeserializeOwned>(self) -> ToolInput<U> {
ToolInput {
- partial_rx: self.partial_rx,
- final_rx: self.final_rx,
+ rx: self.rx,
_phantom: PhantomData,
}
}
}
+pub enum ToolInputPayload<T> {
+ Partial(serde_json::Value),
+ Full(T),
+ InvalidJson { error_message: String },
+}
+
pub struct ToolInputSender {
- partial_tx: mpsc::UnboundedSender<serde_json::Value>,
- final_tx: Option<oneshot::Sender<serde_json::Value>>,
+ has_received_final: bool,
+ tx: mpsc::UnboundedSender<ToolInputPayload<serde_json::Value>>,
}
impl ToolInputSender {
pub(crate) fn channel() -> (Self, ToolInput<serde_json::Value>) {
- let (partial_tx, partial_rx) = mpsc::unbounded();
- let (final_tx, final_rx) = oneshot::channel();
+ let (tx, rx) = mpsc::unbounded();
let sender = Self {
- partial_tx,
- final_tx: Some(final_tx),
+ tx,
+ has_received_final: false,
};
let input = ToolInput {
- partial_rx,
- final_rx,
+ rx,
_phantom: PhantomData,
};
(sender, input)
}
pub(crate) fn has_received_final(&self) -> bool {
- self.final_tx.is_none()
+ self.has_received_final
}
- pub(crate) fn send_partial(&self, value: serde_json::Value) {
- self.partial_tx.unbounded_send(value).ok();
+ pub fn send_partial(&mut self, payload: serde_json::Value) {
+ self.tx
+ .unbounded_send(ToolInputPayload::Partial(payload))
+ .ok();
}
- pub(crate) fn send_final(mut self, value: serde_json::Value) {
- // Close the partial channel so recv_partial() returns None
- self.partial_tx.close_channel();
- if let Some(final_tx) = self.final_tx.take() {
- final_tx.send(value).ok();
- }
+ pub fn send_full(&mut self, payload: serde_json::Value) {
+ self.has_received_final = true;
+ self.tx.unbounded_send(ToolInputPayload::Full(payload)).ok();
+ }
+
+ pub fn send_invalid_json(&mut self, error_message: String) {
+ self.has_received_final = true;
+ self.tx
+ .unbounded_send(ToolInputPayload::InvalidJson { error_message })
+ .ok();
}
}
@@ -4251,68 +4311,78 @@ mod tests {
) {
let (thread, event_stream) = setup_thread_for_test(cx).await;
- cx.update(|cx| {
- thread.update(cx, |thread, _cx| {
- let tool_use_id = LanguageModelToolUseId::from("test_tool_id");
- let tool_name: Arc<str> = Arc::from("test_tool");
- let raw_input: Arc<str> = Arc::from("{invalid json");
- let json_parse_error = "expected value at line 1 column 1".to_string();
-
- // Call the function under test
- let result = thread.handle_tool_use_json_parse_error_event(
- tool_use_id.clone(),
- tool_name.clone(),
- raw_input.clone(),
- json_parse_error,
- &event_stream,
- );
-
- // Verify the result is an error
- assert!(result.is_error);
- assert_eq!(result.tool_use_id, tool_use_id);
- assert_eq!(result.tool_name, tool_name);
- assert!(matches!(
- result.content,
- LanguageModelToolResultContent::Text(_)
- ));
-
- // Verify the tool use was added to the message content
- {
- let last_message = thread.pending_message();
- assert_eq!(
- last_message.content.len(),
- 1,
- "Should have one tool_use in content"
- );
-
- match &last_message.content[0] {
- AgentMessageContent::ToolUse(tool_use) => {
- assert_eq!(tool_use.id, tool_use_id);
- assert_eq!(tool_use.name, tool_name);
- assert_eq!(tool_use.raw_input, raw_input.to_string());
- assert!(tool_use.is_input_complete);
- // Should fall back to empty object for invalid JSON
- assert_eq!(tool_use.input, json!({}));
- }
- _ => panic!("Expected ToolUse content"),
- }
- }
-
- // Insert the tool result (simulating what the caller does)
- thread
- .pending_message()
- .tool_results
- .insert(result.tool_use_id.clone(), result);
+ let tool_use_id = LanguageModelToolUseId::from("test_tool_id");
+ let tool_name: Arc<str> = Arc::from("test_tool");
+ let raw_input: Arc<str> = Arc::from("{invalid json");
+ let json_parse_error = "expected value at line 1 column 1".to_string();
+
+ let (_cancellation_tx, cancellation_rx) = watch::channel(false);
+
+ let result = cx
+ .update(|cx| {
+ thread.update(cx, |thread, cx| {
+ // Call the function under test
+ thread
+ .handle_tool_use_json_parse_error_event(
+ tool_use_id.clone(),
+ tool_name.clone(),
+ raw_input.clone(),
+ json_parse_error,
+ &event_stream,
+ cancellation_rx,
+ cx,
+ )
+ .unwrap()
+ })
+ })
+ .await;
+
+ // Verify the result is an error
+ assert!(result.is_error);
+ assert_eq!(result.tool_use_id, tool_use_id);
+ assert_eq!(result.tool_name, tool_name);
+ assert!(matches!(
+ result.content,
+ LanguageModelToolResultContent::Text(_)
+ ));
- // Verify the tool result was added
+ thread.update(cx, |thread, _cx| {
+ // Verify the tool use was added to the message content
+ {
let last_message = thread.pending_message();
assert_eq!(
- last_message.tool_results.len(),
+ last_message.content.len(),
1,
- "Should have one tool_result"
+ "Should have one tool_use in content"
);
- assert!(last_message.tool_results.contains_key(&tool_use_id));
- });
- });
+
+ match &last_message.content[0] {
+ AgentMessageContent::ToolUse(tool_use) => {
+ assert_eq!(tool_use.id, tool_use_id);
+ assert_eq!(tool_use.name, tool_name);
+ assert_eq!(tool_use.raw_input, raw_input.to_string());
+ assert!(tool_use.is_input_complete);
+ // Should fall back to empty object for invalid JSON
+ assert_eq!(tool_use.input, json!({}));
+ }
+ _ => panic!("Expected ToolUse content"),
+ }
+ }
+
+ // Insert the tool result (simulating what the caller does)
+ thread
+ .pending_message()
+ .tool_results
+ .insert(result.tool_use_id.clone(), result);
+
+ // Verify the tool result was added
+ let last_message = thread.pending_message();
+ assert_eq!(
+ last_message.tool_results.len(),
+ 1,
+ "Should have one tool_result"
+ );
+ assert!(last_message.tool_results.contains_key(&tool_use_id));
+ })
}
}
@@ -563,7 +563,7 @@ mod tests {
use crate::tools::{DeletePathTool, EditFileTool, FetchTool, TerminalTool};
use agent_settings::{AgentProfileId, CompiledRegex, InvalidRegexPattern, ToolRules};
use gpui::px;
- use settings::{DockPosition, NotifyWhenAgentWaiting};
+ use settings::{DockPosition, NotifyWhenAgentWaiting, PlaySoundWhenAgentDone};
use std::sync::Arc;
fn test_agent_settings(tool_permissions: ToolPermissions) -> AgentSettings {
@@ -584,7 +584,7 @@ mod tests {
default_profile: AgentProfileId::default(),
profiles: Default::default(),
notify_when_agent_waiting: NotifyWhenAgentWaiting::default(),
- play_sound_when_agent_done: false,
+ play_sound_when_agent_done: PlaySoundWhenAgentDone::default(),
single_file_review: false,
model_parameters: vec![],
enable_feedback: false,
@@ -595,6 +595,7 @@ mod tests {
message_editor_min_lines: 1,
tool_permissions,
show_turn_stats: false,
+ show_merge_conflict_indicator: true,
new_thread_location: Default::default(),
sidebar_side: Default::default(),
thinking_display: Default::default(),
@@ -383,8 +383,8 @@ mod tests {
assert!(
!matches!(
- event_rx.try_next(),
- Ok(Some(Ok(crate::ThreadEvent::ToolCallAuthorization(_))))
+ event_rx.try_recv(),
+ Ok(Ok(crate::ThreadEvent::ToolCallAuthorization(_)))
),
"Expected a single authorization prompt",
);
@@ -450,8 +450,8 @@ mod tests {
assert!(result.is_err(), "Tool should fail when policy denies");
assert!(
!matches!(
- event_rx.try_next(),
- Ok(Some(Ok(crate::ThreadEvent::ToolCallAuthorization(_))))
+ event_rx.try_recv(),
+ Ok(Ok(crate::ThreadEvent::ToolCallAuthorization(_)))
),
"Deny policy should not emit symlink authorization prompt",
);
@@ -370,8 +370,8 @@ mod tests {
assert!(
!matches!(
- event_rx.try_next(),
- Ok(Some(Ok(crate::ThreadEvent::ToolCallAuthorization(_))))
+ event_rx.try_recv(),
+ Ok(Ok(crate::ThreadEvent::ToolCallAuthorization(_)))
),
"Expected a single authorization prompt",
);
@@ -440,8 +440,8 @@ mod tests {
assert!(result.is_err(), "Tool should fail when policy denies");
assert!(
!matches!(
- event_rx.try_next(),
- Ok(Some(Ok(crate::ThreadEvent::ToolCallAuthorization(_))))
+ event_rx.try_recv(),
+ Ok(Ok(crate::ThreadEvent::ToolCallAuthorization(_)))
),
"Deny policy should not emit symlink authorization prompt",
);
@@ -439,8 +439,8 @@ mod tests {
assert!(
!matches!(
- event_rx.try_next(),
- Ok(Some(Ok(crate::ThreadEvent::ToolCallAuthorization(_))))
+ event_rx.try_recv(),
+ Ok(Ok(crate::ThreadEvent::ToolCallAuthorization(_)))
),
"Expected a single authorization prompt",
);
@@ -513,8 +513,8 @@ mod tests {
assert!(result.is_err(), "Tool should fail when policy denies");
assert!(
!matches!(
- event_rx.try_next(),
- Ok(Some(Ok(crate::ThreadEvent::ToolCallAuthorization(_))))
+ event_rx.try_recv(),
+ Ok(Ok(crate::ThreadEvent::ToolCallAuthorization(_)))
),
"Deny policy should not emit symlink authorization prompt",
);
@@ -1188,7 +1188,7 @@ mod tests {
})
.await
.unwrap();
- assert!(stream_rx.try_next().is_err());
+ assert!(stream_rx.try_recv().is_err());
// Test 4: Path with .zed in the middle should require confirmation
let (stream_tx, mut stream_rx) = ToolCallEventStream::test();
@@ -1251,7 +1251,7 @@ mod tests {
})
.await
.unwrap();
- assert!(stream_rx.try_next().is_err());
+ assert!(stream_rx.try_recv().is_err());
// 5.3: Normal in-project path with allow — no confirmation needed
let (stream_tx, mut stream_rx) = ToolCallEventStream::test();
@@ -1268,7 +1268,7 @@ mod tests {
})
.await
.unwrap();
- assert!(stream_rx.try_next().is_err());
+ assert!(stream_rx.try_recv().is_err());
// 5.4: With Confirm default, non-project paths still prompt
cx.update(|cx| {
@@ -1586,8 +1586,8 @@ mod tests {
assert!(result.is_err(), "Tool should fail when policy denies");
assert!(
!matches!(
- stream_rx.try_next(),
- Ok(Some(Ok(crate::ThreadEvent::ToolCallAuthorization(_))))
+ stream_rx.try_recv(),
+ Ok(Ok(crate::ThreadEvent::ToolCallAuthorization(_)))
),
"Deny policy should not emit symlink authorization prompt",
);
@@ -1658,7 +1658,7 @@ mod tests {
} else {
auth.await.unwrap();
assert!(
- stream_rx.try_next().is_err(),
+ stream_rx.try_recv().is_err(),
"Failed for case: {} - path: {} - expected no confirmation but got one",
description,
path
@@ -1769,7 +1769,7 @@ mod tests {
} else {
auth.await.unwrap();
assert!(
- stream_rx.try_next().is_err(),
+ stream_rx.try_recv().is_err(),
"Failed for case: {} - path: {} - expected no confirmation but got one",
description,
path
@@ -1862,7 +1862,7 @@ mod tests {
stream_rx.expect_authorization().await;
} else {
assert!(
- stream_rx.try_next().is_err(),
+ stream_rx.try_recv().is_err(),
"Failed for case: {} - path: {} - expected no confirmation but got one",
description,
path
@@ -1963,7 +1963,7 @@ mod tests {
})
.await
.unwrap();
- assert!(stream_rx.try_next().is_err());
+ assert!(stream_rx.try_recv().is_err());
}
}
@@ -6,7 +6,7 @@ use crate::{
};
use Role::*;
use anyhow::{Context as _, Result};
-use client::{Client, UserStore};
+use client::{Client, RefreshLlmTokenListener, UserStore};
use fs::FakeFs;
use futures::{FutureExt, StreamExt, future::LocalBoxFuture};
use gpui::{AppContext as _, AsyncApp, Entity, TestAppContext, UpdateGlobal as _};
@@ -274,7 +274,8 @@ impl StreamingEditToolTest {
cx.set_http_client(http_client);
let client = Client::production(cx);
let user_store = cx.new(|cx| UserStore::new(client.clone(), cx));
- language_model::init(user_store.clone(), client.clone(), cx);
+ language_model::init(cx);
+ RefreshLlmTokenListener::register(client.clone(), user_store.clone(), cx);
language_models::init(user_store, client, cx);
});
@@ -982,13 +982,11 @@ mod tests {
"Expected private path validation error, got: {error}"
);
- let event = event_rx.try_next();
+ let event = event_rx.try_recv();
assert!(
!matches!(
event,
- Ok(Some(Ok(crate::thread::ThreadEvent::ToolCallAuthorization(
- _
- ))))
+ Ok(Ok(crate::thread::ThreadEvent::ToolCallAuthorization(_)))
),
"No authorization should be requested when validation fails before listing",
);
@@ -1030,13 +1028,11 @@ mod tests {
"Normal path should succeed without authorization"
);
- let event = event_rx.try_next();
+ let event = event_rx.try_recv();
assert!(
!matches!(
event,
- Ok(Some(Ok(crate::thread::ThreadEvent::ToolCallAuthorization(
- _
- ))))
+ Ok(Ok(crate::thread::ThreadEvent::ToolCallAuthorization(_)))
),
"No authorization should be requested for normal paths",
);
@@ -1087,13 +1083,11 @@ mod tests {
"Intra-project symlink should succeed without authorization: {result:?}",
);
- let event = event_rx.try_next();
+ let event = event_rx.try_recv();
assert!(
!matches!(
event,
- Ok(Some(Ok(crate::thread::ThreadEvent::ToolCallAuthorization(
- _
- ))))
+ Ok(Ok(crate::thread::ThreadEvent::ToolCallAuthorization(_)))
),
"No authorization should be requested for intra-project symlinks",
);
@@ -390,8 +390,8 @@ mod tests {
assert!(
!matches!(
- event_rx.try_next(),
- Ok(Some(Ok(crate::ThreadEvent::ToolCallAuthorization(_))))
+ event_rx.try_recv(),
+ Ok(Ok(crate::ThreadEvent::ToolCallAuthorization(_)))
),
"Expected a single authorization prompt",
);
@@ -457,8 +457,8 @@ mod tests {
assert!(result.is_err(), "Tool should fail when policy denies");
assert!(
!matches!(
- event_rx.try_next(),
- Ok(Some(Ok(crate::ThreadEvent::ToolCallAuthorization(_))))
+ event_rx.try_recv(),
+ Ok(Ok(crate::ThreadEvent::ToolCallAuthorization(_)))
),
"Deny policy should not emit symlink authorization prompt",
);
@@ -1317,13 +1317,11 @@ mod test {
"Expected private-files validation error, got: {error}"
);
- let event = event_rx.try_next();
+ let event = event_rx.try_recv();
assert!(
!matches!(
event,
- Ok(Some(Ok(crate::thread::ThreadEvent::ToolCallAuthorization(
- _
- ))))
+ Ok(Ok(crate::thread::ThreadEvent::ToolCallAuthorization(_)))
),
"No authorization should be requested when validation fails before read",
);
@@ -589,8 +589,8 @@ mod tests {
assert!(result.is_err(), "Tool should fail when policy denies");
assert!(
!matches!(
- event_rx.try_next(),
- Ok(Some(Ok(crate::ThreadEvent::ToolCallAuthorization(_))))
+ event_rx.try_recv(),
+ Ok(Ok(crate::ThreadEvent::ToolCallAuthorization(_)))
),
"Deny policy should not emit symlink authorization prompt",
);
@@ -662,8 +662,8 @@ mod tests {
assert!(
!matches!(
- event_rx.try_next(),
- Ok(Some(Ok(crate::ThreadEvent::ToolCallAuthorization(_))))
+ event_rx.try_recv(),
+ Ok(Ok(crate::ThreadEvent::ToolCallAuthorization(_)))
),
"Expected a single authorization prompt",
);
@@ -584,8 +584,8 @@ mod tests {
assert!(result.is_err(), "Tool should fail when policy denies");
assert!(
!matches!(
- event_rx.try_next(),
- Ok(Some(Ok(crate::ThreadEvent::ToolCallAuthorization(_))))
+ event_rx.try_recv(),
+ Ok(Ok(crate::ThreadEvent::ToolCallAuthorization(_)))
),
"Deny policy should not emit symlink authorization prompt",
);
@@ -657,8 +657,8 @@ mod tests {
assert!(
!matches!(
- event_rx.try_next(),
- Ok(Some(Ok(crate::ThreadEvent::ToolCallAuthorization(_))))
+ event_rx.try_recv(),
+ Ok(Ok(crate::ThreadEvent::ToolCallAuthorization(_)))
),
"Expected a single authorization prompt",
);
@@ -2,6 +2,7 @@ use super::edit_file_tool::EditFileTool;
use super::restore_file_from_disk_tool::RestoreFileFromDiskTool;
use super::save_file_tool::SaveFileTool;
use super::tool_edit_parser::{ToolEditEvent, ToolEditParser};
+use crate::ToolInputPayload;
use crate::{
AgentTool, Thread, ToolCallEventStream, ToolInput,
edit_agent::{
@@ -12,7 +13,7 @@ use crate::{
use acp_thread::Diff;
use action_log::ActionLog;
use agent_client_protocol::{self as acp, ToolCallLocation, ToolCallUpdateFields};
-use anyhow::{Context as _, Result};
+use anyhow::Result;
use collections::HashSet;
use futures::FutureExt as _;
use gpui::{App, AppContext, AsyncApp, Entity, Task, WeakEntity};
@@ -188,6 +189,10 @@ pub enum StreamingEditFileToolOutput {
},
Error {
error: String,
+ #[serde(default)]
+ input_path: Option<PathBuf>,
+ #[serde(default)]
+ diff: String,
},
}
@@ -195,6 +200,8 @@ impl StreamingEditFileToolOutput {
pub fn error(error: impl Into<String>) -> Self {
Self::Error {
error: error.into(),
+ input_path: None,
+ diff: String::new(),
}
}
}
@@ -215,7 +222,24 @@ impl std::fmt::Display for StreamingEditFileToolOutput {
)
}
}
- StreamingEditFileToolOutput::Error { error } => write!(f, "{error}"),
+ StreamingEditFileToolOutput::Error {
+ error,
+ diff,
+ input_path,
+ } => {
+ write!(f, "{error}\n")?;
+ if let Some(input_path) = input_path
+ && !diff.is_empty()
+ {
+ write!(
+ f,
+ "Edited {}:\n\n```diff\n{diff}\n```",
+ input_path.display()
+ )
+ } else {
+ write!(f, "No edits were made.")
+ }
+ }
}
}
}
@@ -233,6 +257,14 @@ pub struct StreamingEditFileTool {
language_registry: Arc<LanguageRegistry>,
}
+enum EditSessionResult {
+ Completed(EditSession),
+ Failed {
+ error: String,
+ session: Option<EditSession>,
+ },
+}
+
impl StreamingEditFileTool {
pub fn new(
project: Entity<Project>,
@@ -276,6 +308,158 @@ impl StreamingEditFileTool {
});
}
}
+
+ async fn ensure_buffer_saved(&self, buffer: &Entity<Buffer>, cx: &mut AsyncApp) {
+ let format_on_save_enabled = buffer.read_with(cx, |buffer, cx| {
+ let settings = language_settings::LanguageSettings::for_buffer(buffer, cx);
+ settings.format_on_save != FormatOnSave::Off
+ });
+
+ if format_on_save_enabled {
+ self.project
+ .update(cx, |project, cx| {
+ project.format(
+ HashSet::from_iter([buffer.clone()]),
+ LspFormatTarget::Buffers,
+ false,
+ FormatTrigger::Save,
+ cx,
+ )
+ })
+ .await
+ .log_err();
+ }
+
+ self.project
+ .update(cx, |project, cx| project.save_buffer(buffer.clone(), cx))
+ .await
+ .log_err();
+
+ self.action_log.update(cx, |log, cx| {
+ log.buffer_edited(buffer.clone(), cx);
+ });
+ }
+
+ async fn process_streaming_edits(
+ &self,
+ input: &mut ToolInput<StreamingEditFileToolInput>,
+ event_stream: &ToolCallEventStream,
+ cx: &mut AsyncApp,
+ ) -> EditSessionResult {
+ let mut session: Option<EditSession> = None;
+ let mut last_partial: Option<StreamingEditFileToolPartialInput> = None;
+
+ loop {
+ futures::select! {
+ payload = input.next().fuse() => {
+ match payload {
+ Ok(payload) => match payload {
+ ToolInputPayload::Partial(partial) => {
+ if let Ok(parsed) = serde_json::from_value::<StreamingEditFileToolPartialInput>(partial) {
+ let path_complete = parsed.path.is_some()
+ && parsed.path.as_ref() == last_partial.as_ref().and_then(|partial| partial.path.as_ref());
+
+ last_partial = Some(parsed.clone());
+
+ if session.is_none()
+ && path_complete
+ && let StreamingEditFileToolPartialInput {
+ path: Some(path),
+ display_description: Some(display_description),
+ mode: Some(mode),
+ ..
+ } = &parsed
+ {
+ match EditSession::new(
+ PathBuf::from(path),
+ display_description,
+ *mode,
+ self,
+ event_stream,
+ cx,
+ )
+ .await
+ {
+ Ok(created_session) => session = Some(created_session),
+ Err(error) => {
+ log::error!("Failed to create edit session: {}", error);
+ return EditSessionResult::Failed {
+ error,
+ session: None,
+ };
+ }
+ }
+ }
+
+ if let Some(current_session) = &mut session
+ && let Err(error) = current_session.process(parsed, self, event_stream, cx)
+ {
+ log::error!("Failed to process edit: {}", error);
+ return EditSessionResult::Failed { error, session };
+ }
+ }
+ }
+ ToolInputPayload::Full(full_input) => {
+ let mut session = if let Some(session) = session {
+ session
+ } else {
+ match EditSession::new(
+ full_input.path.clone(),
+ &full_input.display_description,
+ full_input.mode,
+ self,
+ event_stream,
+ cx,
+ )
+ .await
+ {
+ Ok(created_session) => created_session,
+ Err(error) => {
+ log::error!("Failed to create edit session: {}", error);
+ return EditSessionResult::Failed {
+ error,
+ session: None,
+ };
+ }
+ }
+ };
+
+ return match session.finalize(full_input, self, event_stream, cx).await {
+ Ok(()) => EditSessionResult::Completed(session),
+ Err(error) => {
+ log::error!("Failed to finalize edit: {}", error);
+ EditSessionResult::Failed {
+ error,
+ session: Some(session),
+ }
+ }
+ };
+ }
+ ToolInputPayload::InvalidJson { error_message } => {
+ log::error!("Received invalid JSON: {error_message}");
+ return EditSessionResult::Failed {
+ error: error_message,
+ session,
+ };
+ }
+ },
+ Err(error) => {
+ return EditSessionResult::Failed {
+ error: format!("Failed to receive tool input: {error}"),
+ session,
+ };
+ }
+ }
+ }
+ _ = event_stream.cancelled_by_user().fuse() => {
+ return EditSessionResult::Failed {
+ error: "Edit cancelled by user".to_string(),
+ session,
+ };
+ }
+ }
+ }
+ }
}
impl AgentTool for StreamingEditFileTool {
@@ -348,94 +532,40 @@ impl AgentTool for StreamingEditFileTool {
cx: &mut App,
) -> Task<Result<Self::Output, Self::Output>> {
cx.spawn(async move |cx: &mut AsyncApp| {
- let mut state: Option<EditSession> = None;
- let mut last_partial: Option<StreamingEditFileToolPartialInput> = None;
- loop {
- futures::select! {
- partial = input.recv_partial().fuse() => {
- let Some(partial_value) = partial else { break };
- if let Ok(parsed) = serde_json::from_value::<StreamingEditFileToolPartialInput>(partial_value) {
- let path_complete = parsed.path.is_some()
- && parsed.path.as_ref() == last_partial.as_ref().and_then(|p| p.path.as_ref());
-
- last_partial = Some(parsed.clone());
-
- if state.is_none()
- && path_complete
- && let StreamingEditFileToolPartialInput {
- path: Some(path),
- display_description: Some(display_description),
- mode: Some(mode),
- ..
- } = &parsed
- {
- match EditSession::new(
- &PathBuf::from(path),
- display_description,
- *mode,
- &self,
- &event_stream,
- cx,
- )
- .await
- {
- Ok(session) => state = Some(session),
- Err(e) => {
- log::error!("Failed to create edit session: {}", e);
- return Err(e);
- }
- }
- }
-
- if let Some(state) = &mut state {
- if let Err(e) = state.process(parsed, &self, &event_stream, cx) {
- log::error!("Failed to process edit: {}", e);
- return Err(e);
- }
- }
- }
- }
- _ = event_stream.cancelled_by_user().fuse() => {
- return Err(StreamingEditFileToolOutput::error("Edit cancelled by user"));
- }
- }
- }
- let full_input =
- input
- .recv()
- .await
- .map_err(|e| {
- let err = StreamingEditFileToolOutput::error(format!("Failed to receive tool input: {e}"));
- log::error!("Failed to receive tool input: {e}");
- err
- })?;
-
- let mut state = if let Some(state) = state {
- state
- } else {
- match EditSession::new(
- &full_input.path,
- &full_input.display_description,
- full_input.mode,
- &self,
- &event_stream,
- cx,
- )
+ match self
+ .process_streaming_edits(&mut input, &event_stream, cx)
.await
- {
- Ok(session) => session,
- Err(e) => {
- log::error!("Failed to create edit session: {}", e);
- return Err(e);
- }
+ {
+ EditSessionResult::Completed(session) => {
+ self.ensure_buffer_saved(&session.buffer, cx).await;
+ let (new_text, diff) = session.compute_new_text_and_diff(cx).await;
+ Ok(StreamingEditFileToolOutput::Success {
+ old_text: session.old_text.clone(),
+ new_text,
+ input_path: session.input_path,
+ diff,
+ })
}
- };
- match state.finalize(full_input, &self, &event_stream, cx).await {
- Ok(output) => Ok(output),
- Err(e) => {
- log::error!("Failed to finalize edit: {}", e);
- Err(e)
+ EditSessionResult::Failed {
+ error,
+ session: Some(session),
+ } => {
+ self.ensure_buffer_saved(&session.buffer, cx).await;
+ let (_new_text, diff) = session.compute_new_text_and_diff(cx).await;
+ Err(StreamingEditFileToolOutput::Error {
+ error,
+ input_path: Some(session.input_path),
+ diff,
+ })
}
+ EditSessionResult::Failed {
+ error,
+ session: None,
+ } => Err(StreamingEditFileToolOutput::Error {
+ error,
+ input_path: None,
+ diff: String::new(),
+ }),
}
})
}
@@ -472,6 +602,7 @@ impl AgentTool for StreamingEditFileTool {
pub struct EditSession {
abs_path: PathBuf,
+ input_path: PathBuf,
buffer: Entity<Buffer>,
old_text: Arc<String>,
diff: Entity<Diff>,
@@ -518,23 +649,21 @@ impl EditPipeline {
impl EditSession {
async fn new(
- path: &PathBuf,
+ path: PathBuf,
display_description: &str,
mode: StreamingEditFileMode,
tool: &StreamingEditFileTool,
event_stream: &ToolCallEventStream,
cx: &mut AsyncApp,
- ) -> Result<Self, StreamingEditFileToolOutput> {
- let project_path = cx
- .update(|cx| resolve_path(mode, &path, &tool.project, cx))
- .map_err(|e| StreamingEditFileToolOutput::error(e.to_string()))?;
+ ) -> Result<Self, String> {
+ let project_path = cx.update(|cx| resolve_path(mode, &path, &tool.project, cx))?;
let Some(abs_path) = cx.update(|cx| tool.project.read(cx).absolute_path(&project_path, cx))
else {
- return Err(StreamingEditFileToolOutput::error(format!(
+ return Err(format!(
"Worktree at '{}' does not exist",
path.to_string_lossy()
- )));
+ ));
};
event_stream.update_fields(
@@ -543,13 +672,13 @@ impl EditSession {
cx.update(|cx| tool.authorize(&path, &display_description, event_stream, cx))
.await
- .map_err(|e| StreamingEditFileToolOutput::error(e.to_string()))?;
+ .map_err(|e| e.to_string())?;
let buffer = tool
.project
.update(cx, |project, cx| project.open_buffer(project_path, cx))
.await
- .map_err(|e| StreamingEditFileToolOutput::error(e.to_string()))?;
+ .map_err(|e| e.to_string())?;
ensure_buffer_saved(&buffer, &abs_path, tool, cx)?;
@@ -578,6 +707,7 @@ impl EditSession {
Ok(Self {
abs_path,
+ input_path: path,
buffer,
old_text,
diff,
@@ -594,22 +724,20 @@ impl EditSession {
tool: &StreamingEditFileTool,
event_stream: &ToolCallEventStream,
cx: &mut AsyncApp,
- ) -> Result<StreamingEditFileToolOutput, StreamingEditFileToolOutput> {
- let old_text = self.old_text.clone();
-
+ ) -> Result<(), String> {
match input.mode {
StreamingEditFileMode::Write => {
- let content = input.content.ok_or_else(|| {
- StreamingEditFileToolOutput::error("'content' field is required for write mode")
- })?;
+ let content = input
+ .content
+ .ok_or_else(|| "'content' field is required for write mode".to_string())?;
let events = self.parser.finalize_content(&content);
self.process_events(&events, tool, event_stream, cx)?;
}
StreamingEditFileMode::Edit => {
- let edits = input.edits.ok_or_else(|| {
- StreamingEditFileToolOutput::error("'edits' field is required for edit mode")
- })?;
+ let edits = input
+ .edits
+ .ok_or_else(|| "'edits' field is required for edit mode".to_string())?;
let events = self.parser.finalize_edits(&edits);
self.process_events(&events, tool, event_stream, cx)?;
@@ -625,53 +753,15 @@ impl EditSession {
}
}
}
+ Ok(())
+ }
- let format_on_save_enabled = self.buffer.read_with(cx, |buffer, cx| {
- let settings = language_settings::LanguageSettings::for_buffer(buffer, cx);
- settings.format_on_save != FormatOnSave::Off
- });
-
- if format_on_save_enabled {
- tool.action_log.update(cx, |log, cx| {
- log.buffer_edited(self.buffer.clone(), cx);
- });
-
- let format_task = tool.project.update(cx, |project, cx| {
- project.format(
- HashSet::from_iter([self.buffer.clone()]),
- LspFormatTarget::Buffers,
- false,
- FormatTrigger::Save,
- cx,
- )
- });
- futures::select! {
- result = format_task.fuse() => { result.log_err(); },
- _ = event_stream.cancelled_by_user().fuse() => {
- return Err(StreamingEditFileToolOutput::error("Edit cancelled by user"));
- }
- };
- }
-
- let save_task = tool.project.update(cx, |project, cx| {
- project.save_buffer(self.buffer.clone(), cx)
- });
- futures::select! {
- result = save_task.fuse() => { result.map_err(|e| StreamingEditFileToolOutput::error(e.to_string()))?; },
- _ = event_stream.cancelled_by_user().fuse() => {
- return Err(StreamingEditFileToolOutput::error("Edit cancelled by user"));
- }
- };
-
- tool.action_log.update(cx, |log, cx| {
- log.buffer_edited(self.buffer.clone(), cx);
- });
-
+ async fn compute_new_text_and_diff(&self, cx: &mut AsyncApp) -> (String, String) {
let new_snapshot = self.buffer.read_with(cx, |buffer, _cx| buffer.snapshot());
let (new_text, unified_diff) = cx
.background_spawn({
let new_snapshot = new_snapshot.clone();
- let old_text = old_text.clone();
+ let old_text = self.old_text.clone();
async move {
let new_text = new_snapshot.text();
let diff = language::unified_diff(&old_text, &new_text);
@@ -679,14 +769,7 @@ impl EditSession {
}
})
.await;
-
- let output = StreamingEditFileToolOutput::Success {
- input_path: input.path,
- new_text,
- old_text: old_text.clone(),
- diff: unified_diff,
- };
- Ok(output)
+ (new_text, unified_diff)
}
fn process(
@@ -695,7 +778,7 @@ impl EditSession {
tool: &StreamingEditFileTool,
event_stream: &ToolCallEventStream,
cx: &mut AsyncApp,
- ) -> Result<(), StreamingEditFileToolOutput> {
+ ) -> Result<(), String> {
match &self.mode {
StreamingEditFileMode::Write => {
if let Some(content) = &partial.content {
@@ -719,7 +802,7 @@ impl EditSession {
tool: &StreamingEditFileTool,
event_stream: &ToolCallEventStream,
cx: &mut AsyncApp,
- ) -> Result<(), StreamingEditFileToolOutput> {
+ ) -> Result<(), String> {
for event in events {
match event {
ToolEditEvent::ContentChunk { chunk } => {
@@ -760,7 +843,7 @@ impl EditSession {
{
if let Some(match_range) = matcher.push(chunk, None) {
let anchor_range = self.buffer.read_with(cx, |buffer, _cx| {
- buffer.anchor_range_between(match_range.clone())
+ buffer.anchor_range_outside(match_range.clone())
});
self.diff
.update(cx, |diff, cx| diff.reveal_range(anchor_range, cx));
@@ -795,7 +878,7 @@ impl EditSession {
let anchor_range = self
.buffer
- .read_with(cx, |buffer, _cx| buffer.anchor_range_between(range.clone()));
+ .read_with(cx, |buffer, _cx| buffer.anchor_range_outside(range.clone()));
self.diff
.update(cx, |diff, cx| diff.reveal_range(anchor_range, cx));
@@ -953,7 +1036,7 @@ fn apply_char_operations(
}
CharOperation::Delete { bytes } => {
let delete_end = *edit_cursor + bytes;
- let anchor_range = snapshot.anchor_range_around(*edit_cursor..delete_end);
+ let anchor_range = snapshot.anchor_range_inside(*edit_cursor..delete_end);
agent_edit_buffer(&buffer, [(anchor_range, "")], action_log, cx);
*edit_cursor = delete_end;
}
@@ -969,14 +1052,14 @@ fn extract_match(
buffer: &Entity<Buffer>,
edit_index: &usize,
cx: &mut AsyncApp,
-) -> Result<Range<usize>, StreamingEditFileToolOutput> {
+) -> Result<Range<usize>, String> {
match matches.len() {
- 0 => Err(StreamingEditFileToolOutput::error(format!(
+ 0 => Err(format!(
"Could not find matching text for edit at index {}. \
The old_text did not match any content in the file. \
Please read the file again to get the current content.",
edit_index,
- ))),
+ )),
1 => Ok(matches.into_iter().next().unwrap()),
_ => {
let snapshot = buffer.read_with(cx, |buffer, _cx| buffer.snapshot());
@@ -985,12 +1068,12 @@ fn extract_match(
.map(|r| (snapshot.offset_to_point(r.start).row + 1).to_string())
.collect::<Vec<_>>()
.join(", ");
- Err(StreamingEditFileToolOutput::error(format!(
+ Err(format!(
"Edit {} matched multiple locations in the file at lines: {}. \
Please provide more context in old_text to uniquely \
identify the location.",
edit_index, lines
- )))
+ ))
}
}
}
@@ -1022,7 +1105,7 @@ fn ensure_buffer_saved(
abs_path: &PathBuf,
tool: &StreamingEditFileTool,
cx: &mut AsyncApp,
-) -> Result<(), StreamingEditFileToolOutput> {
+) -> Result<(), String> {
let last_read_mtime = tool
.action_log
.read_with(cx, |log, _| log.file_read_time(abs_path));
@@ -1063,15 +1146,14 @@ fn ensure_buffer_saved(
then ask them to save or revert the file manually and inform you when it's ok to proceed."
}
};
- return Err(StreamingEditFileToolOutput::error(message));
+ return Err(message.to_string());
}
if let (Some(last_read), Some(current)) = (last_read_mtime, current_mtime) {
if current != last_read {
- return Err(StreamingEditFileToolOutput::error(
- "The file has been modified since you last read it. \
- Please read the file again to get the current state before editing it.",
- ));
+ return Err("The file has been modified since you last read it. \
+ Please read the file again to get the current state before editing it."
+ .to_string());
}
}
@@ -1083,56 +1165,63 @@ fn resolve_path(
path: &PathBuf,
project: &Entity<Project>,
cx: &mut App,
-) -> Result<ProjectPath> {
+) -> Result<ProjectPath, String> {
let project = project.read(cx);
match mode {
StreamingEditFileMode::Edit => {
let path = project
.find_project_path(&path, cx)
- .context("Can't edit file: path not found")?;
+ .ok_or_else(|| "Can't edit file: path not found".to_string())?;
let entry = project
.entry_for_path(&path, cx)
- .context("Can't edit file: path not found")?;
+ .ok_or_else(|| "Can't edit file: path not found".to_string())?;
- anyhow::ensure!(entry.is_file(), "Can't edit file: path is a directory");
- Ok(path)
+ if entry.is_file() {
+ Ok(path)
+ } else {
+ Err("Can't edit file: path is a directory".to_string())
+ }
}
StreamingEditFileMode::Write => {
if let Some(path) = project.find_project_path(&path, cx)
&& let Some(entry) = project.entry_for_path(&path, cx)
{
- anyhow::ensure!(entry.is_file(), "Can't write to file: path is a directory");
- return Ok(path);
+ if entry.is_file() {
+ return Ok(path);
+ } else {
+ return Err("Can't write to file: path is a directory".to_string());
+ }
}
- let parent_path = path.parent().context("Can't create file: incorrect path")?;
+ let parent_path = path
+ .parent()
+ .ok_or_else(|| "Can't create file: incorrect path".to_string())?;
let parent_project_path = project.find_project_path(&parent_path, cx);
let parent_entry = parent_project_path
.as_ref()
.and_then(|path| project.entry_for_path(path, cx))
- .context("Can't create file: parent directory doesn't exist")?;
+ .ok_or_else(|| "Can't create file: parent directory doesn't exist")?;
- anyhow::ensure!(
- parent_entry.is_dir(),
- "Can't create file: parent is not a directory"
- );
+ if !parent_entry.is_dir() {
+ return Err("Can't create file: parent is not a directory".to_string());
+ }
let file_name = path
.file_name()
.and_then(|file_name| file_name.to_str())
.and_then(|file_name| RelPath::unix(file_name).ok())
- .context("Can't create file: invalid filename")?;
+ .ok_or_else(|| "Can't create file: invalid filename".to_string())?;
let new_file_path = parent_project_path.map(|parent| ProjectPath {
path: parent.path.join(file_name),
..parent
});
- new_file_path.context("Can't create file")
+ new_file_path.ok_or_else(|| "Can't create file".to_string())
}
}
}
@@ -1382,10 +1471,17 @@ mod tests {
})
.await;
- let StreamingEditFileToolOutput::Error { error } = result.unwrap_err() else {
+ let StreamingEditFileToolOutput::Error {
+ error,
+ diff,
+ input_path,
+ } = result.unwrap_err()
+ else {
panic!("expected error");
};
assert_eq!(error, "Can't edit file: path not found");
+ assert!(diff.is_empty());
+ assert_eq!(input_path, None);
}
#[gpui::test]
@@ -1411,7 +1507,7 @@ mod tests {
})
.await;
- let StreamingEditFileToolOutput::Error { error } = result.unwrap_err() else {
+ let StreamingEditFileToolOutput::Error { error, .. } = result.unwrap_err() else {
panic!("expected error");
};
assert!(
@@ -1424,7 +1520,7 @@ mod tests {
async fn test_streaming_early_buffer_open(cx: &mut TestAppContext) {
let (tool, _project, _action_log, _fs, _thread) =
setup_test(cx, json!({"file.txt": "line 1\nline 2\nline 3\n"})).await;
- let (sender, input) = ToolInput::<StreamingEditFileToolInput>::test();
+ let (mut sender, input) = ToolInput::<StreamingEditFileToolInput>::test();
let (event_stream, _receiver) = ToolCallEventStream::test();
let task = cx.update(|cx| tool.clone().run(input, event_stream, cx));
@@ -1447,7 +1543,7 @@ mod tests {
cx.run_until_parked();
// Now send the final complete input
- sender.send_final(json!({
+ sender.send_full(json!({
"display_description": "Edit lines",
"path": "root/file.txt",
"mode": "edit",
@@ -1465,7 +1561,7 @@ mod tests {
async fn test_streaming_path_completeness_heuristic(cx: &mut TestAppContext) {
let (tool, _project, _action_log, _fs, _thread) =
setup_test(cx, json!({"file.txt": "hello world"})).await;
- let (sender, input) = ToolInput::<StreamingEditFileToolInput>::test();
+ let (mut sender, input) = ToolInput::<StreamingEditFileToolInput>::test();
let (event_stream, _receiver) = ToolCallEventStream::test();
let task = cx.update(|cx| tool.clone().run(input, event_stream, cx));
@@ -1485,7 +1581,7 @@ mod tests {
cx.run_until_parked();
// Send final
- sender.send_final(json!({
+ sender.send_full(json!({
"display_description": "Overwrite file",
"path": "root/file.txt",
"mode": "write",
@@ -1503,7 +1599,7 @@ mod tests {
async fn test_streaming_cancellation_during_partials(cx: &mut TestAppContext) {
let (tool, _project, _action_log, _fs, _thread) =
setup_test(cx, json!({"file.txt": "hello world"})).await;
- let (sender, input) = ToolInput::<StreamingEditFileToolInput>::test();
+ let (mut sender, input) = ToolInput::<StreamingEditFileToolInput>::test();
let (event_stream, _receiver, mut cancellation_tx) =
ToolCallEventStream::test_with_cancellation();
let task = cx.update(|cx| tool.clone().run(input, event_stream, cx));
@@ -1521,7 +1617,7 @@ mod tests {
drop(sender);
let result = task.await;
- let StreamingEditFileToolOutput::Error { error } = result.unwrap_err() else {
+ let StreamingEditFileToolOutput::Error { error, .. } = result.unwrap_err() else {
panic!("expected error");
};
assert!(
@@ -1537,7 +1633,7 @@ mod tests {
json!({"file.txt": "line 1\nline 2\nline 3\nline 4\nline 5\n"}),
)
.await;
- let (sender, input) = ToolInput::<StreamingEditFileToolInput>::test();
+ let (mut sender, input) = ToolInput::<StreamingEditFileToolInput>::test();
let (event_stream, _receiver) = ToolCallEventStream::test();
let task = cx.update(|cx| tool.clone().run(input, event_stream, cx));
@@ -1578,7 +1674,7 @@ mod tests {
cx.run_until_parked();
// Send final complete input
- sender.send_final(json!({
+ sender.send_full(json!({
"display_description": "Edit multiple lines",
"path": "root/file.txt",
"mode": "edit",
@@ -1601,7 +1697,7 @@ mod tests {
#[gpui::test]
async fn test_streaming_create_file_with_partials(cx: &mut TestAppContext) {
let (tool, _project, _action_log, _fs, _thread) = setup_test(cx, json!({"dir": {}})).await;
- let (sender, input) = ToolInput::<StreamingEditFileToolInput>::test();
+ let (mut sender, input) = ToolInput::<StreamingEditFileToolInput>::test();
let (event_stream, _receiver) = ToolCallEventStream::test();
let task = cx.update(|cx| tool.clone().run(input, event_stream, cx));
@@ -1625,7 +1721,7 @@ mod tests {
cx.run_until_parked();
// Final with full content
- sender.send_final(json!({
+ sender.send_full(json!({
"display_description": "Create new file",
"path": "root/dir/new_file.txt",
"mode": "write",
@@ -1643,12 +1739,12 @@ mod tests {
async fn test_streaming_no_partials_direct_final(cx: &mut TestAppContext) {
let (tool, _project, _action_log, _fs, _thread) =
setup_test(cx, json!({"file.txt": "line 1\nline 2\nline 3\n"})).await;
- let (sender, input) = ToolInput::<StreamingEditFileToolInput>::test();
+ let (mut sender, input) = ToolInput::<StreamingEditFileToolInput>::test();
let (event_stream, _receiver) = ToolCallEventStream::test();
let task = cx.update(|cx| tool.clone().run(input, event_stream, cx));
// Send final immediately with no partials (simulates non-streaming path)
- sender.send_final(json!({
+ sender.send_full(json!({
"display_description": "Edit lines",
"path": "root/file.txt",
"mode": "edit",
@@ -1669,7 +1765,7 @@ mod tests {
json!({"file.txt": "line 1\nline 2\nline 3\nline 4\nline 5\n"}),
)
.await;
- let (sender, input) = ToolInput::<StreamingEditFileToolInput>::test();
+ let (mut sender, input) = ToolInput::<StreamingEditFileToolInput>::test();
let (event_stream, _receiver) = ToolCallEventStream::test();
let task = cx.update(|cx| tool.clone().run(input, event_stream, cx));
@@ -1739,7 +1835,7 @@ mod tests {
);
// Send final complete input
- sender.send_final(json!({
+ sender.send_full(json!({
"display_description": "Edit multiple lines",
"path": "root/file.txt",
"mode": "edit",
@@ -1767,7 +1863,7 @@ mod tests {
async fn test_streaming_incremental_three_edits(cx: &mut TestAppContext) {
let (tool, project, _action_log, _fs, _thread) =
setup_test(cx, json!({"file.txt": "aaa\nbbb\nccc\nddd\neee\n"})).await;
- let (sender, input) = ToolInput::<StreamingEditFileToolInput>::test();
+ let (mut sender, input) = ToolInput::<StreamingEditFileToolInput>::test();
let (event_stream, _receiver) = ToolCallEventStream::test();
let task = cx.update(|cx| tool.clone().run(input, event_stream, cx));
@@ -1835,7 +1931,7 @@ mod tests {
assert_eq!(buffer_text.as_deref(), Some("AAA\nbbb\nCCC\nddd\nEEEeee\n"));
// Send final
- sender.send_final(json!({
+ sender.send_full(json!({
"display_description": "Edit three lines",
"path": "root/file.txt",
"mode": "edit",
@@ -1857,7 +1953,7 @@ mod tests {
async fn test_streaming_edit_failure_mid_stream(cx: &mut TestAppContext) {
let (tool, project, _action_log, _fs, _thread) =
setup_test(cx, json!({"file.txt": "line 1\nline 2\nline 3\n"})).await;
- let (sender, input) = ToolInput::<StreamingEditFileToolInput>::test();
+ let (mut sender, input) = ToolInput::<StreamingEditFileToolInput>::test();
let (event_stream, _receiver) = ToolCallEventStream::test();
let task = cx.update(|cx| tool.clone().run(input, event_stream, cx));
@@ -1893,16 +1989,17 @@ mod tests {
}));
cx.run_until_parked();
- // Verify edit 1 was applied
- let buffer_text = project.update(cx, |project, cx| {
+ let buffer = project.update(cx, |project, cx| {
let pp = project
.find_project_path(&PathBuf::from("root/file.txt"), cx)
.unwrap();
- project.get_open_buffer(&pp, cx).map(|b| b.read(cx).text())
+ project.get_open_buffer(&pp, cx).unwrap()
});
+
+ // Verify edit 1 was applied
+ let buffer_text = buffer.read_with(cx, |buffer, _cx| buffer.text());
assert_eq!(
- buffer_text.as_deref(),
- Some("MODIFIED\nline 2\nline 3\n"),
+ buffer_text, "MODIFIED\nline 2\nline 3\n",
"First edit should be applied even though second edit will fail"
);
@@ -1925,20 +2022,32 @@ mod tests {
drop(sender);
let result = task.await;
- let StreamingEditFileToolOutput::Error { error } = result.unwrap_err() else {
+ let StreamingEditFileToolOutput::Error {
+ error,
+ diff,
+ input_path,
+ } = result.unwrap_err()
+ else {
panic!("expected error");
};
+
assert!(
error.contains("Could not find matching text for edit at index 1"),
"Expected error about edit 1 failing, got: {error}"
);
+ // Ensure that first edit was applied successfully and that we saved the buffer
+ assert_eq!(input_path, Some(PathBuf::from("root/file.txt")));
+ assert_eq!(
+ diff,
+ "@@ -1,3 +1,3 @@\n-line 1\n+MODIFIED\n line 2\n line 3\n"
+ );
}
#[gpui::test]
async fn test_streaming_single_edit_no_incremental(cx: &mut TestAppContext) {
let (tool, project, _action_log, _fs, _thread) =
setup_test(cx, json!({"file.txt": "hello world\n"})).await;
- let (sender, input) = ToolInput::<StreamingEditFileToolInput>::test();
+ let (mut sender, input) = ToolInput::<StreamingEditFileToolInput>::test();
let (event_stream, _receiver) = ToolCallEventStream::test();
let task = cx.update(|cx| tool.clone().run(input, event_stream, cx));
@@ -1975,7 +2084,7 @@ mod tests {
);
// Send final — the edit is applied during finalization
- sender.send_final(json!({
+ sender.send_full(json!({
"display_description": "Single edit",
"path": "root/file.txt",
"mode": "edit",
@@ -1993,7 +2102,7 @@ mod tests {
async fn test_streaming_input_partials_then_final(cx: &mut TestAppContext) {
let (tool, _project, _action_log, _fs, _thread) =
setup_test(cx, json!({"file.txt": "line 1\nline 2\nline 3\n"})).await;
- let (sender, input): (ToolInputSender, ToolInput<StreamingEditFileToolInput>) =
+ let (mut sender, input): (ToolInputSender, ToolInput<StreamingEditFileToolInput>) =
ToolInput::test();
let (event_stream, _event_rx) = ToolCallEventStream::test();
let task = cx.update(|cx| tool.clone().run(input, event_stream, cx));
@@ -2020,7 +2129,7 @@ mod tests {
cx.run_until_parked();
// Send the final complete input
- sender.send_final(json!({
+ sender.send_full(json!({
"display_description": "Edit lines",
"path": "root/file.txt",
"mode": "edit",
@@ -2038,7 +2147,7 @@ mod tests {
async fn test_streaming_input_sender_dropped_before_final(cx: &mut TestAppContext) {
let (tool, _project, _action_log, _fs, _thread) =
setup_test(cx, json!({"file.txt": "hello world\n"})).await;
- let (sender, input): (ToolInputSender, ToolInput<StreamingEditFileToolInput>) =
+ let (mut sender, input): (ToolInputSender, ToolInput<StreamingEditFileToolInput>) =
ToolInput::test();
let (event_stream, _event_rx) = ToolCallEventStream::test();
let task = cx.update(|cx| tool.clone().run(input, event_stream, cx));
@@ -2064,7 +2173,7 @@ mod tests {
// Create a channel and send multiple partials before a final, then use
// ToolInput::resolved-style immediate delivery to confirm recv() works
// when partials are already buffered.
- let (sender, input): (ToolInputSender, ToolInput<StreamingEditFileToolInput>) =
+ let (mut sender, input): (ToolInputSender, ToolInput<StreamingEditFileToolInput>) =
ToolInput::test();
let (event_stream, _event_rx) = ToolCallEventStream::test();
let task = cx.update(|cx| tool.clone().run(input, event_stream, cx));
@@ -2077,7 +2186,7 @@ mod tests {
"path": "root/dir/new.txt",
"mode": "write"
}));
- sender.send_final(json!({
+ sender.send_full(json!({
"display_description": "Create",
"path": "root/dir/new.txt",
"mode": "write",
@@ -681,17 +681,17 @@ mod tests {
);
assert!(
!matches!(
- rx.try_next(),
- Ok(Some(Ok(crate::ThreadEvent::ToolCallAuthorization(_))))
+ rx.try_recv(),
+ Ok(Ok(crate::ThreadEvent::ToolCallAuthorization(_)))
),
"invalid command should not request authorization"
);
assert!(
!matches!(
- rx.try_next(),
- Ok(Some(Ok(crate::ThreadEvent::ToolCallUpdate(
+ rx.try_recv(),
+ Ok(Ok(crate::ThreadEvent::ToolCallUpdate(
acp_thread::ToolCallUpdate::UpdateFields(_)
- ))))
+ )))
),
"invalid command should not emit a terminal card update"
);
@@ -810,8 +810,8 @@ mod tests {
);
assert!(
!matches!(
- rx.try_next(),
- Ok(Some(Ok(crate::ThreadEvent::ToolCallAuthorization(_))))
+ rx.try_recv(),
+ Ok(Ok(crate::ThreadEvent::ToolCallAuthorization(_)))
),
"hardcoded denial should not request authorization"
);
@@ -1058,8 +1058,8 @@ mod tests {
);
assert!(
!matches!(
- rx.try_next(),
- Ok(Some(Ok(crate::ThreadEvent::ToolCallAuthorization(_))))
+ rx.try_recv(),
+ Ok(Ok(crate::ThreadEvent::ToolCallAuthorization(_)))
),
"rejected command {command:?} should not request authorization"
);
@@ -32,7 +32,6 @@ futures.workspace = true
gpui.workspace = true
feature_flags.workspace = true
gpui_tokio = { workspace = true, optional = true }
-credentials_provider.workspace = true
google_ai.workspace = true
http_client.workspace = true
indoc.workspace = true
@@ -53,6 +52,7 @@ terminal.workspace = true
uuid.workspace = true
util.workspace = true
watch.workspace = true
+zed_credentials_provider.workspace = true
[target.'cfg(unix)'.dependencies]
libc.workspace = true
@@ -3,7 +3,6 @@ use acp_thread::AgentConnection;
use agent_client_protocol as acp;
use anyhow::{Context as _, Result};
use collections::HashSet;
-use credentials_provider::CredentialsProvider;
use fs::Fs;
use gpui::{App, AppContext as _, Entity, Task};
use language_model::{ApiKey, EnvVar};
@@ -392,7 +391,7 @@ fn api_key_for_gemini_cli(cx: &mut App) -> Task<Result<String>> {
if let Some(key) = env_var.value {
return Task::ready(Ok(key));
}
- let credentials_provider = <dyn CredentialsProvider>::global(cx);
+ let credentials_provider = zed_credentials_provider::global(cx);
let api_url = google_ai::API_URL.to_string();
cx.spawn(async move |cx| {
Ok(
@@ -1,6 +1,7 @@
use crate::{AgentServer, AgentServerDelegate};
use acp_thread::{AcpThread, AgentThreadEntry, ToolCall, ToolCallStatus};
use agent_client_protocol as acp;
+use client::RefreshLlmTokenListener;
use futures::{FutureExt, StreamExt, channel::mpsc, select};
use gpui::AppContext;
use gpui::{Entity, TestAppContext};
@@ -413,7 +414,8 @@ pub async fn init_test(cx: &mut TestAppContext) -> Arc<FakeFs> {
cx.set_http_client(Arc::new(http_client));
let client = client::Client::production(cx);
let user_store = cx.new(|cx| client::UserStore::new(client.clone(), cx));
- language_model::init(user_store, client, cx);
+ language_model::init(cx);
+ RefreshLlmTokenListener::register(client.clone(), user_store, cx);
#[cfg(test)]
project::agent_server_store::AllAgentServersSettings::override_global(
@@ -13,8 +13,8 @@ use schemars::JsonSchema;
use serde::{Deserialize, Serialize};
use settings::{
DockPosition, DockSide, LanguageModelParameters, LanguageModelSelection, NewThreadLocation,
- NotifyWhenAgentWaiting, RegisterSetting, Settings, SettingsContent, SettingsStore,
- SidebarDockPosition, SidebarSide, ThinkingBlockDisplay, ToolPermissionMode,
+ NotifyWhenAgentWaiting, PlaySoundWhenAgentDone, RegisterSetting, Settings, SettingsContent,
+ SettingsStore, SidebarDockPosition, SidebarSide, ThinkingBlockDisplay, ToolPermissionMode,
update_settings_file,
};
@@ -165,7 +165,7 @@ pub struct AgentSettings {
pub profiles: IndexMap<AgentProfileId, AgentProfileSettings>,
pub notify_when_agent_waiting: NotifyWhenAgentWaiting,
- pub play_sound_when_agent_done: bool,
+ pub play_sound_when_agent_done: PlaySoundWhenAgentDone,
pub single_file_review: bool,
pub model_parameters: Vec<LanguageModelParameters>,
pub enable_feedback: bool,
@@ -176,6 +176,7 @@ pub struct AgentSettings {
pub use_modifier_to_send: bool,
pub message_editor_min_lines: usize,
pub show_turn_stats: bool,
+ pub show_merge_conflict_indicator: bool,
pub tool_permissions: ToolPermissions,
pub new_thread_location: NewThreadLocation,
}
@@ -618,7 +619,7 @@ impl Settings for AgentSettings {
.collect(),
notify_when_agent_waiting: agent.notify_when_agent_waiting.unwrap(),
- play_sound_when_agent_done: agent.play_sound_when_agent_done.unwrap(),
+ play_sound_when_agent_done: agent.play_sound_when_agent_done.unwrap_or_default(),
single_file_review: agent.single_file_review.unwrap(),
model_parameters: agent.model_parameters,
enable_feedback: agent.enable_feedback.unwrap(),
@@ -629,6 +630,7 @@ impl Settings for AgentSettings {
use_modifier_to_send: agent.use_modifier_to_send.unwrap(),
message_editor_min_lines: agent.message_editor_min_lines.unwrap(),
show_turn_stats: agent.show_turn_stats.unwrap(),
+ show_merge_conflict_indicator: agent.show_merge_conflict_indicator.unwrap(),
tool_permissions: compile_tool_permissions(agent.tool_permissions),
new_thread_location: agent.new_thread_location.unwrap_or_default(),
}
@@ -202,6 +202,7 @@ impl ModelInput {
.text(cx)
.parse::<u64>()
.map_err(|_| SharedString::from("Max Tokens must be a number"))?,
+ reasoning_effort: None,
capabilities: ModelCapabilities {
tools: self.capabilities.supports_tools.selected(),
images: self.capabilities.supports_images.selected(),
@@ -815,7 +816,7 @@ mod tests {
cx.set_global(store);
theme_settings::init(theme::LoadThemes::JustBase, cx);
- language_model::init_settings(cx);
+ language_model::init(cx);
editor::init(cx);
});
@@ -138,11 +138,12 @@ impl AgentDiffPane {
path_a.cmp(&path_b)
});
- let mut paths_to_delete = self
+ let mut buffers_to_delete = self
.multibuffer
.read(cx)
- .paths()
- .cloned()
+ .snapshot(cx)
+ .excerpts()
+ .map(|excerpt| excerpt.context.start.buffer_id)
.collect::<HashSet<_>>();
for (buffer, diff_handle) in sorted_buffers {
@@ -151,7 +152,7 @@ impl AgentDiffPane {
}
let path_key = PathKey::for_buffer(&buffer, cx);
- paths_to_delete.remove(&path_key);
+ buffers_to_delete.remove(&buffer.read(cx).remote_id());
let snapshot = buffer.read(cx).snapshot();
@@ -168,7 +169,7 @@ impl AgentDiffPane {
let (was_empty, is_excerpt_newly_added) =
self.multibuffer.update(cx, |multibuffer, cx| {
let was_empty = multibuffer.is_empty();
- let (_, is_excerpt_newly_added) = multibuffer.set_excerpts_for_path(
+ let is_excerpt_newly_added = multibuffer.update_excerpts_for_path(
path_key.clone(),
buffer.clone(),
diff_hunk_ranges,
@@ -183,13 +184,13 @@ impl AgentDiffPane {
if was_empty {
let first_hunk = editor
.diff_hunks_in_ranges(
- &[editor::Anchor::min()..editor::Anchor::max()],
+ &[editor::Anchor::Min..editor::Anchor::Max],
&self.multibuffer.read(cx).read(cx),
)
.next();
if let Some(first_hunk) = first_hunk {
- let first_hunk_start = first_hunk.multi_buffer_range().start;
+ let first_hunk_start = first_hunk.multi_buffer_range.start;
editor.change_selections(Default::default(), window, cx, |selections| {
selections.select_anchor_ranges([first_hunk_start..first_hunk_start]);
})
@@ -208,8 +209,8 @@ impl AgentDiffPane {
}
self.multibuffer.update(cx, |multibuffer, cx| {
- for path in paths_to_delete {
- multibuffer.remove_excerpts_for_path(path, cx);
+ for buffer_id in buffers_to_delete {
+ multibuffer.remove_excerpts_for_buffer(buffer_id, cx);
}
});
@@ -239,13 +240,13 @@ impl AgentDiffPane {
self.editor.update(cx, |editor, cx| {
let first_hunk = editor
.diff_hunks_in_ranges(
- &[position..editor::Anchor::max()],
+ &[position..editor::Anchor::Max],
&self.multibuffer.read(cx).read(cx),
)
.next();
if let Some(first_hunk) = first_hunk {
- let first_hunk_start = first_hunk.multi_buffer_range().start;
+ let first_hunk_start = first_hunk.multi_buffer_range.start;
editor.change_selections(Default::default(), window, cx, |selections| {
selections.select_anchor_ranges([first_hunk_start..first_hunk_start]);
})
@@ -282,7 +283,7 @@ impl AgentDiffPane {
editor,
&snapshot,
&self.thread,
- vec![editor::Anchor::min()..editor::Anchor::max()],
+ vec![editor::Anchor::Min..editor::Anchor::Max],
self.workspace.clone(),
window,
cx,
@@ -451,20 +452,20 @@ fn update_editor_selection(
diff_hunks
.last()
.and_then(|last_kept_hunk| {
- let last_kept_hunk_end = last_kept_hunk.multi_buffer_range().end;
+ let last_kept_hunk_end = last_kept_hunk.multi_buffer_range.end;
editor
.diff_hunks_in_ranges(
- &[last_kept_hunk_end..editor::Anchor::max()],
+ &[last_kept_hunk_end..editor::Anchor::Max],
buffer_snapshot,
)
.nth(1)
})
.or_else(|| {
let first_kept_hunk = diff_hunks.first()?;
- let first_kept_hunk_start = first_kept_hunk.multi_buffer_range().start;
+ let first_kept_hunk_start = first_kept_hunk.multi_buffer_range.start;
editor
.diff_hunks_in_ranges(
- &[editor::Anchor::min()..first_kept_hunk_start],
+ &[editor::Anchor::Min..first_kept_hunk_start],
buffer_snapshot,
)
.next()
@@ -473,7 +474,7 @@ fn update_editor_selection(
if let Some(target_hunk) = target_hunk {
editor.change_selections(Default::default(), window, cx, |selections| {
- let next_hunk_start = target_hunk.multi_buffer_range().start;
+ let next_hunk_start = target_hunk.multi_buffer_range.start;
selections.select_anchor_ranges([next_hunk_start..next_hunk_start]);
})
}
@@ -1567,7 +1568,7 @@ impl AgentDiff {
editor.update(cx, |editor, cx| {
let snapshot = multibuffer.read(cx).snapshot(cx);
if let Some(first_hunk) = snapshot.diff_hunks().next() {
- let first_hunk_start = first_hunk.multi_buffer_range().start;
+ let first_hunk_start = first_hunk.multi_buffer_range.start;
editor.change_selections(
SelectionEffects::scroll(Autoscroll::center()),
@@ -1648,7 +1649,7 @@ impl AgentDiff {
editor,
&snapshot,
thread,
- vec![editor::Anchor::min()..editor::Anchor::max()],
+ vec![editor::Anchor::Min..editor::Anchor::Max],
window,
cx,
);
@@ -1669,7 +1670,7 @@ impl AgentDiff {
editor,
&snapshot,
thread,
- vec![editor::Anchor::min()..editor::Anchor::max()],
+ vec![editor::Anchor::Min..editor::Anchor::Max],
workspace.clone(),
window,
cx,
@@ -1808,7 +1809,7 @@ mod tests {
cx.set_global(settings_store);
prompt_store::init(cx);
theme_settings::init(theme::LoadThemes::JustBase, cx);
- language_model::init_settings(cx);
+ language_model::init(cx);
});
let fs = FakeFs::new(cx.executor());
@@ -1965,7 +1966,7 @@ mod tests {
cx.set_global(settings_store);
prompt_store::init(cx);
theme_settings::init(theme::LoadThemes::JustBase, cx);
- language_model::init_settings(cx);
+ language_model::init(cx);
workspace::register_project_item::<Editor>(cx);
});
@@ -25,6 +25,7 @@ use zed_actions::agent::{
ResolveConflictsWithAgent, ReviewBranchDiff,
};
+use crate::thread_metadata_store::ThreadMetadataStore;
use crate::{
AddContextServer, AgentDiffPane, ConversationView, CopyThreadToClipboard, CycleStartThreadIn,
Follow, InlineAssistant, LoadThreadFromClipboard, NewThread, OpenActiveThreadAsMarkdown,
@@ -753,28 +754,21 @@ impl AgentPanel {
.as_ref()
.and_then(|p| p.last_active_thread.as_ref())
{
- if thread_info.agent_type.is_native() {
- let session_id = acp::SessionId::new(thread_info.session_id.clone());
- let load_result = cx.update(|_window, cx| {
- let thread_store = ThreadStore::global(cx);
- thread_store.update(cx, |store, cx| store.load_thread(session_id, cx))
- });
- let thread_exists = if let Ok(task) = load_result {
- task.await.ok().flatten().is_some()
- } else {
- false
- };
- if thread_exists {
- Some(thread_info)
- } else {
- log::warn!(
- "last active thread {} not found in database, skipping restoration",
- thread_info.session_id
- );
- None
- }
- } else {
+ let session_id = acp::SessionId::new(thread_info.session_id.clone());
+ let has_metadata = cx
+ .update(|_window, cx| {
+ let store = ThreadMetadataStore::global(cx);
+ store.read(cx).entry(&session_id).is_some()
+ })
+ .unwrap_or(false);
+ if has_metadata {
Some(thread_info)
+ } else {
+ log::warn!(
+ "last active thread {} has no metadata, skipping restoration",
+ thread_info.session_id
+ );
+ None
}
} else {
None
@@ -1734,6 +1728,10 @@ impl AgentPanel {
return;
};
+ if thread_view.read(cx).thread.read(cx).entries().is_empty() {
+ return;
+ }
+
self.background_threads
.insert(thread_view.read(cx).id.clone(), conversation_view);
self.cleanup_background_threads(cx);
@@ -2078,6 +2076,10 @@ impl AgentPanel {
window: &mut Window,
cx: &mut Context<Self>,
) {
+ if let Some(store) = ThreadMetadataStore::try_global(cx) {
+ store.update(cx, |store, cx| store.unarchive(&session_id, cx));
+ }
+
if let Some(conversation_view) = self.background_threads.remove(&session_id) {
self.set_active_view(
ActiveView::AgentThread { conversation_view },
@@ -2588,7 +2590,7 @@ impl AgentPanel {
anyhow::Ok(())
});
- self._worktree_creation_task = Some(cx.foreground_executor().spawn(async move {
+ self._worktree_creation_task = Some(cx.background_spawn(async move {
task.await.log_err();
}));
}
@@ -2745,6 +2747,10 @@ impl AgentPanel {
new_window_handle.update(cx, |multi_workspace, window, cx| {
multi_workspace.activate(new_workspace.clone(), window, cx);
+
+ new_workspace.update(cx, |workspace, cx| {
+ workspace.run_create_worktree_tasks(window, cx);
+ })
})?;
this.update_in(cx, |this, window, cx| {
@@ -4297,6 +4303,8 @@ mod tests {
);
});
+ send_message(&panel_a, cx);
+
let agent_type_a = panel_a.read_with(cx, |panel, _cx| panel.selected_agent.clone());
// --- Set up workspace B: ClaudeCode, no active thread ---
@@ -4356,6 +4364,72 @@ mod tests {
});
}
+ #[gpui::test]
+ async fn test_non_native_thread_without_metadata_is_not_restored(cx: &mut TestAppContext) {
+ init_test(cx);
+ cx.update(|cx| {
+ cx.update_flags(true, vec!["agent-v2".to_string()]);
+ agent::ThreadStore::init_global(cx);
+ language_model::LanguageModelRegistry::test(cx);
+ });
+
+ let fs = FakeFs::new(cx.executor());
+ let project = Project::test(fs, [], cx).await;
+
+ let multi_workspace =
+ cx.add_window(|window, cx| MultiWorkspace::test_new(project.clone(), window, cx));
+
+ let workspace = multi_workspace
+ .read_with(cx, |multi_workspace, _cx| {
+ multi_workspace.workspace().clone()
+ })
+ .unwrap();
+
+ workspace.update(cx, |workspace, _cx| {
+ workspace.set_random_database_id();
+ });
+
+ let cx = &mut VisualTestContext::from_window(multi_workspace.into(), cx);
+
+ let panel = workspace.update_in(cx, |workspace, window, cx| {
+ cx.new(|cx| AgentPanel::new(workspace, None, window, cx))
+ });
+
+ panel.update_in(cx, |panel, window, cx| {
+ panel.open_external_thread_with_server(
+ Rc::new(StubAgentServer::default_response()),
+ window,
+ cx,
+ );
+ });
+
+ cx.run_until_parked();
+
+ panel.read_with(cx, |panel, cx| {
+ assert!(
+ panel.active_agent_thread(cx).is_some(),
+ "should have an active thread after connection"
+ );
+ });
+
+ // Serialize without ever sending a message, so no thread metadata exists.
+ panel.update(cx, |panel, cx| panel.serialize(cx));
+ cx.run_until_parked();
+
+ let async_cx = cx.update(|window, cx| window.to_async(cx));
+ let loaded = AgentPanel::load(workspace.downgrade(), async_cx)
+ .await
+ .expect("panel load should succeed");
+ cx.run_until_parked();
+
+ loaded.read_with(cx, |panel, _cx| {
+ assert!(
+ panel.active_conversation_view().is_none(),
+ "thread without metadata should not be restored"
+ );
+ });
+ }
+
/// Extracts the text from a Text content block, panicking if it's not Text.
fn expect_text_block(block: &acp::ContentBlock) -> &str {
match block {
@@ -4698,6 +4772,38 @@ mod tests {
(panel, cx)
}
+ #[gpui::test]
+ async fn test_empty_draft_thread_not_retained_when_navigating_away(cx: &mut TestAppContext) {
+ let (panel, mut cx) = setup_panel(cx).await;
+
+ let connection_a = StubAgentConnection::new();
+ open_thread_with_connection(&panel, connection_a, &mut cx);
+ let session_id_a = active_session_id(&panel, &cx);
+
+ panel.read_with(&cx, |panel, cx| {
+ let thread = panel.active_agent_thread(cx).unwrap();
+ assert!(
+ thread.read(cx).entries().is_empty(),
+ "newly opened draft thread should have no entries"
+ );
+ assert!(panel.background_threads.is_empty());
+ });
+
+ let connection_b = StubAgentConnection::new();
+ open_thread_with_connection(&panel, connection_b, &mut cx);
+
+ panel.read_with(&cx, |panel, _cx| {
+ assert!(
+ panel.background_threads.is_empty(),
+ "empty draft thread should not be retained in background_threads"
+ );
+ assert!(
+ !panel.background_threads.contains_key(&session_id_a),
+ "empty draft thread should not be keyed in background_threads"
+ );
+ });
+ }
+
#[gpui::test]
async fn test_running_thread_retained_when_navigating_away(cx: &mut TestAppContext) {
let (panel, mut cx) = setup_panel(cx).await;
@@ -4809,6 +4915,7 @@ mod tests {
// Open thread B — thread A goes to background.
let connection_b = StubAgentConnection::new();
open_thread_with_connection(&panel, connection_b, &mut cx);
+ send_message(&panel, &mut cx);
let session_id_b = active_session_id(&panel, &cx);
@@ -5068,7 +5175,7 @@ mod tests {
multi_workspace
.read_with(cx, |multi_workspace, _cx| {
assert_eq!(
- multi_workspace.workspaces().len(),
+ multi_workspace.workspaces().count(),
1,
"LocalProject should not create a new workspace"
);
@@ -5344,6 +5451,11 @@ mod tests {
let multi_workspace =
cx.add_window(|window, cx| MultiWorkspace::test_new(project.clone(), window, cx));
+ multi_workspace
+ .update(cx, |multi_workspace, _, cx| {
+ multi_workspace.open_sidebar(cx);
+ })
+ .unwrap();
let workspace = multi_workspace
.read_with(cx, |multi_workspace, _cx| {
@@ -5431,15 +5543,14 @@ mod tests {
.read_with(cx, |multi_workspace, cx| {
// There should be more than one workspace now (the original + the new worktree).
assert!(
- multi_workspace.workspaces().len() > 1,
+ multi_workspace.workspaces().count() > 1,
"expected a new workspace to have been created, found {}",
- multi_workspace.workspaces().len(),
+ multi_workspace.workspaces().count(),
);
// Check the newest workspace's panel for the correct agent.
let new_workspace = multi_workspace
.workspaces()
- .iter()
.find(|ws| ws.entity_id() != workspace.entity_id())
.expect("should find the new workspace");
let new_panel = new_workspace
@@ -173,6 +173,22 @@ actions!(
ToggleThinkingEffortMenu,
/// Toggles fast mode for models that support it.
ToggleFastMode,
+ /// Scroll the output by one page up.
+ ScrollOutputPageUp,
+ /// Scroll the output by one page down.
+ ScrollOutputPageDown,
+ /// Scroll the output up by three lines.
+ ScrollOutputLineUp,
+ /// Scroll the output down by three lines.
+ ScrollOutputLineDown,
+ /// Scroll the output to the top.
+ ScrollOutputToTop,
+ /// Scroll the output to the bottom.
+ ScrollOutputToBottom,
+ /// Scroll the output to the previous user message.
+ ScrollOutputToPreviousMessage,
+ /// Scroll the output to the next user message.
+ ScrollOutputToNextMessage,
]
);
@@ -674,7 +690,9 @@ mod tests {
use feature_flags::FeatureFlagAppExt;
use gpui::{BorrowAppContext, TestAppContext, px};
use project::DisableAiSettings;
- use settings::{DockPosition, NotifyWhenAgentWaiting, Settings, SettingsStore};
+ use settings::{
+ DockPosition, NotifyWhenAgentWaiting, PlaySoundWhenAgentDone, Settings, SettingsStore,
+ };
#[gpui::test]
fn test_agent_command_palette_visibility(cx: &mut TestAppContext) {
@@ -705,7 +723,7 @@ mod tests {
default_profile: AgentProfileId::default(),
profiles: Default::default(),
notify_when_agent_waiting: NotifyWhenAgentWaiting::default(),
- play_sound_when_agent_done: false,
+ play_sound_when_agent_done: PlaySoundWhenAgentDone::Never,
single_file_review: false,
model_parameters: vec![],
enable_feedback: false,
@@ -716,6 +734,7 @@ mod tests {
message_editor_min_lines: 1,
tool_permissions: Default::default(),
show_turn_stats: false,
+ show_merge_conflict_indicator: true,
new_thread_location: Default::default(),
sidebar_side: Default::default(),
thinking_display: Default::default(),
@@ -303,7 +303,7 @@ impl CodegenAlternative {
let snapshot = buffer.read(cx).snapshot(cx);
let (old_buffer, _, _) = snapshot
- .range_to_buffer_ranges(range.start..=range.end)
+ .range_to_buffer_ranges(range.start..range.end)
.pop()
.unwrap();
let old_buffer = cx.new(|cx| {
@@ -684,7 +684,7 @@ impl CodegenAlternative {
let language_name = {
let multibuffer = self.buffer.read(cx);
let snapshot = multibuffer.snapshot(cx);
- let ranges = snapshot.range_to_buffer_ranges(self.range.start..=self.range.end);
+ let ranges = snapshot.range_to_buffer_ranges(self.range.start..self.range.end);
ranges
.first()
.and_then(|(buffer, _, _)| buffer.language())
@@ -9,9 +9,7 @@ use crate::ThreadHistory;
use acp_thread::MentionUri;
use agent_client_protocol as acp;
use anyhow::Result;
-use editor::{
- CompletionProvider, Editor, ExcerptId, code_context_menus::COMPLETION_MENU_MAX_WIDTH,
-};
+use editor::{CompletionProvider, Editor, code_context_menus::COMPLETION_MENU_MAX_WIDTH};
use futures::FutureExt as _;
use fuzzy::{PathMatch, StringMatch, StringMatchCandidate};
use gpui::{App, BackgroundExecutor, Entity, SharedString, Task, WeakEntity};
@@ -621,7 +619,7 @@ impl<T: PromptCompletionProviderDelegate> PromptCompletionProvider<T> {
for (terminal_text, terminal_range) in terminal_ranges {
let snapshot = editor.read(cx).buffer().read(cx).snapshot(cx);
let Some(start) =
- snapshot.as_singleton_anchor(source_range.start)
+ snapshot.anchor_in_excerpt(source_range.start)
else {
return;
};
@@ -1235,7 +1233,6 @@ impl<T: PromptCompletionProviderDelegate> PromptCompletionProvider<T> {
impl<T: PromptCompletionProviderDelegate> CompletionProvider for PromptCompletionProvider<T> {
fn completions(
&self,
- _excerpt_id: ExcerptId,
buffer: &Entity<Buffer>,
buffer_position: Anchor,
_trigger: CompletionContext,
@@ -2147,7 +2144,7 @@ fn build_code_label_for_path(
.theme()
.syntax()
.highlight_id("variable")
- .map(HighlightId);
+ .map(HighlightId::new);
let mut label = CodeLabelBuilder::default();
label.push_str(file, None);
@@ -85,8 +85,11 @@ use crate::{
AuthorizeToolCall, ClearMessageQueue, CycleFavoriteModels, CycleModeSelector,
CycleThinkingEffort, EditFirstQueuedMessage, ExpandMessageEditor, Follow, KeepAll, NewThread,
OpenAddContextMenu, OpenAgentDiff, OpenHistory, RejectAll, RejectOnce,
- RemoveFirstQueuedMessage, SendImmediately, SendNextQueuedMessage, ToggleFastMode,
- ToggleProfileSelector, ToggleThinkingEffortMenu, ToggleThinkingMode, UndoLastReject,
+ RemoveFirstQueuedMessage, ScrollOutputLineDown, ScrollOutputLineUp, ScrollOutputPageDown,
+ ScrollOutputPageUp, ScrollOutputToBottom, ScrollOutputToNextMessage,
+ ScrollOutputToPreviousMessage, ScrollOutputToTop, SendImmediately, SendNextQueuedMessage,
+ ToggleFastMode, ToggleProfileSelector, ToggleThinkingEffortMenu, ToggleThinkingMode,
+ UndoLastReject,
};
const STOPWATCH_THRESHOLD: Duration = Duration::from_secs(30);
@@ -809,7 +812,7 @@ impl ConversationView {
let agent_id = self.agent.agent_id();
let session_capabilities = Arc::new(RwLock::new(SessionCapabilities::new(
thread.read(cx).prompt_capabilities(),
- vec![],
+ thread.read(cx).available_commands().to_vec(),
)));
let action_log = thread.read(cx).action_log().clone();
@@ -828,6 +831,8 @@ impl ConversationView {
let count = thread.read(cx).entries().len();
let list_state = ListState::new(0, gpui::ListAlignment::Top, px(2048.0));
+ list_state.set_follow_mode(gpui::FollowMode::Tail);
+
entry_view_state.update(cx, |view_state, cx| {
for ix in 0..count {
view_state.sync_entry(ix, &thread, window, cx);
@@ -841,7 +846,7 @@ impl ConversationView {
if let Some(scroll_position) = thread.read(cx).ui_scroll_position() {
list_state.scroll_to(scroll_position);
} else {
- list_state.set_follow_tail(true);
+ list_state.scroll_to_end();
}
AgentDiff::set_active_thread(&self.workspace, thread.clone(), window, cx);
@@ -1257,9 +1262,11 @@ impl ConversationView {
AcpThreadEvent::EntryUpdated(index) => {
if let Some(active) = self.thread_view(&thread_id) {
let entry_view_state = active.read(cx).entry_view_state.clone();
+ let list_state = active.read(cx).list_state.clone();
entry_view_state.update(cx, |view_state, cx| {
- view_state.sync_entry(*index, thread, window, cx)
+ view_state.sync_entry(*index, thread, window, cx);
});
+ list_state.remeasure_items(*index..*index + 1);
active.update(cx, |active, cx| {
active.auto_expand_streaming_thought(cx);
});
@@ -1295,10 +1302,16 @@ impl ConversationView {
}
AcpThreadEvent::Stopped(stop_reason) => {
if let Some(active) = self.thread_view(&thread_id) {
+ let is_generating =
+ matches!(thread.read(cx).status(), ThreadStatus::Generating);
active.update(cx, |active, cx| {
- active.thread_retry_status.take();
- active.clear_auto_expand_tracking();
- active.list_state.set_follow_tail(false);
+ if !is_generating {
+ active.thread_retry_status.take();
+ active.clear_auto_expand_tracking();
+ if active.list_state.is_following_tail() {
+ active.list_state.scroll_to_end();
+ }
+ }
active.sync_generating_indicator(cx);
});
}
@@ -1367,9 +1380,15 @@ impl ConversationView {
}
AcpThreadEvent::Error => {
if let Some(active) = self.thread_view(&thread_id) {
+ let is_generating =
+ matches!(thread.read(cx).status(), ThreadStatus::Generating);
active.update(cx, |active, cx| {
- active.thread_retry_status.take();
- active.list_state.set_follow_tail(false);
+ if !is_generating {
+ active.thread_retry_status.take();
+ if active.list_state.is_following_tail() {
+ active.list_state.scroll_to_end();
+ }
+ }
active.sync_generating_indicator(cx);
});
}
@@ -1429,40 +1448,24 @@ impl ConversationView {
self.emit_token_limit_telemetry_if_needed(thread, cx);
}
AcpThreadEvent::AvailableCommandsUpdated(available_commands) => {
- let mut available_commands = available_commands.clone();
-
- if thread
- .read(cx)
- .connection()
- .auth_methods()
- .iter()
- .any(|method| method.id().0.as_ref() == "claude-login")
- {
- available_commands.push(acp::AvailableCommand::new("login", "Authenticate"));
- available_commands.push(acp::AvailableCommand::new("logout", "Authenticate"));
- }
+ if let Some(thread_view) = self.thread_view(&thread_id) {
+ let has_commands = !available_commands.is_empty();
- let has_commands = !available_commands.is_empty();
- if let Some(active) = self.active_thread() {
- active.update(cx, |active, _cx| {
- active
- .session_capabilities
- .write()
- .set_available_commands(available_commands);
- });
- }
-
- let agent_display_name = self
- .agent_server_store
- .read(cx)
- .agent_display_name(&self.agent.agent_id())
- .unwrap_or_else(|| self.agent.agent_id().0.to_string().into());
+ let agent_display_name = self
+ .agent_server_store
+ .read(cx)
+ .agent_display_name(&self.agent.agent_id())
+ .unwrap_or_else(|| self.agent.agent_id().0.to_string().into());
- if let Some(active) = self.active_thread() {
let new_placeholder =
placeholder_text(agent_display_name.as_ref(), has_commands);
- active.update(cx, |active, cx| {
- active.message_editor.update(cx, |editor, cx| {
+
+ thread_view.update(cx, |thread_view, cx| {
+ thread_view
+ .session_capabilities
+ .write()
+ .set_available_commands(available_commands.clone());
+ thread_view.message_editor.update(cx, |editor, cx| {
editor.set_placeholder_text(&new_placeholder, window, cx);
});
});
@@ -2329,9 +2332,9 @@ impl ConversationView {
}
}
+ #[cfg(feature = "audio")]
fn play_notification_sound(&self, window: &Window, cx: &mut App) {
- let settings = AgentSettings::get_global(cx);
- let _visible = window.is_window_active()
+ let visible = window.is_window_active()
&& if let Some(mw) = window.root::<MultiWorkspace>().flatten() {
self.agent_panel_visible(&mw, cx)
} else {
@@ -2339,8 +2342,8 @@ impl ConversationView {
.upgrade()
.is_some_and(|workspace| AgentPanel::is_visible(&workspace, cx))
};
- #[cfg(feature = "audio")]
- if settings.play_sound_when_agent_done && !_visible {
+ let settings = AgentSettings::get_global(cx);
+ if settings.play_sound_when_agent_done.should_play(visible) {
Audio::play_sound(Sound::AgentDone, cx);
}
}
@@ -2970,6 +2973,166 @@ pub(crate) mod tests {
});
}
+ #[derive(Clone)]
+ struct RestoredAvailableCommandsConnection;
+
+ impl AgentConnection for RestoredAvailableCommandsConnection {
+ fn agent_id(&self) -> AgentId {
+ AgentId::new("restored-available-commands")
+ }
+
+ fn telemetry_id(&self) -> SharedString {
+ "restored-available-commands".into()
+ }
+
+ fn new_session(
+ self: Rc<Self>,
+ project: Entity<Project>,
+ _work_dirs: PathList,
+ cx: &mut App,
+ ) -> Task<gpui::Result<Entity<AcpThread>>> {
+ let thread = build_test_thread(
+ self,
+ project,
+ "RestoredAvailableCommandsConnection",
+ SessionId::new("new-session"),
+ cx,
+ );
+ Task::ready(Ok(thread))
+ }
+
+ fn supports_load_session(&self) -> bool {
+ true
+ }
+
+ fn load_session(
+ self: Rc<Self>,
+ session_id: acp::SessionId,
+ project: Entity<Project>,
+ _work_dirs: PathList,
+ _title: Option<SharedString>,
+ cx: &mut App,
+ ) -> Task<gpui::Result<Entity<AcpThread>>> {
+ let thread = build_test_thread(
+ self,
+ project,
+ "RestoredAvailableCommandsConnection",
+ session_id,
+ cx,
+ );
+
+ thread
+ .update(cx, |thread, cx| {
+ thread.handle_session_update(
+ acp::SessionUpdate::AvailableCommandsUpdate(
+ acp::AvailableCommandsUpdate::new(vec![acp::AvailableCommand::new(
+ "help", "Get help",
+ )]),
+ ),
+ cx,
+ )
+ })
+ .expect("available commands update should succeed");
+
+ Task::ready(Ok(thread))
+ }
+
+ fn auth_methods(&self) -> &[acp::AuthMethod] {
+ &[]
+ }
+
+ fn authenticate(
+ &self,
+ _method_id: acp::AuthMethodId,
+ _cx: &mut App,
+ ) -> Task<gpui::Result<()>> {
+ Task::ready(Ok(()))
+ }
+
+ fn prompt(
+ &self,
+ _id: Option<acp_thread::UserMessageId>,
+ _params: acp::PromptRequest,
+ _cx: &mut App,
+ ) -> Task<gpui::Result<acp::PromptResponse>> {
+ Task::ready(Ok(acp::PromptResponse::new(acp::StopReason::EndTurn)))
+ }
+
+ fn cancel(&self, _session_id: &acp::SessionId, _cx: &mut App) {}
+
+ fn into_any(self: Rc<Self>) -> Rc<dyn Any> {
+ self
+ }
+ }
+
+ #[gpui::test]
+ async fn test_restored_threads_keep_available_commands(cx: &mut TestAppContext) {
+ init_test(cx);
+
+ let fs = FakeFs::new(cx.executor());
+ let project = Project::test(fs, [], cx).await;
+ let (multi_workspace, cx) =
+ cx.add_window_view(|window, cx| MultiWorkspace::test_new(project.clone(), window, cx));
+ let workspace = multi_workspace.read_with(cx, |mw, _| mw.workspace().clone());
+
+ let thread_store = cx.update(|_window, cx| cx.new(|cx| ThreadStore::new(cx)));
+ let connection_store =
+ cx.update(|_window, cx| cx.new(|cx| AgentConnectionStore::new(project.clone(), cx)));
+
+ let conversation_view = cx.update(|window, cx| {
+ cx.new(|cx| {
+ ConversationView::new(
+ Rc::new(StubAgentServer::new(RestoredAvailableCommandsConnection)),
+ connection_store,
+ Agent::Custom { id: "Test".into() },
+ Some(SessionId::new("restored-session")),
+ None,
+ None,
+ None,
+ workspace.downgrade(),
+ project,
+ Some(thread_store),
+ None,
+ window,
+ cx,
+ )
+ })
+ });
+
+ cx.run_until_parked();
+
+ let message_editor = message_editor(&conversation_view, cx);
+ let editor =
+ message_editor.update(cx, |message_editor, _cx| message_editor.editor().clone());
+ let placeholder = editor.update(cx, |editor, cx| editor.placeholder_text(cx));
+
+ active_thread(&conversation_view, cx).read_with(cx, |view, _cx| {
+ let available_commands = view
+ .session_capabilities
+ .read()
+ .available_commands()
+ .to_vec();
+ assert_eq!(available_commands.len(), 1);
+ assert_eq!(available_commands[0].name.as_str(), "help");
+ assert_eq!(available_commands[0].description.as_str(), "Get help");
+ });
+
+ assert_eq!(
+ placeholder,
+ Some("Message Test — @ to include context, / for commands".to_string())
+ );
+
+ message_editor.update_in(cx, |editor, window, cx| {
+ editor.set_text("/help", window, cx);
+ });
+
+ let contents_result = message_editor
+ .update(cx, |editor, cx| editor.contents(false, cx))
+ .await;
+
+ assert!(contents_result.is_ok());
+ }
+
#[gpui::test]
async fn test_resume_thread_uses_session_cwd_when_inside_project(cx: &mut TestAppContext) {
init_test(cx);
@@ -3356,7 +3519,6 @@ pub(crate) mod tests {
// Verify workspace1 is no longer the active workspace
multi_workspace_handle
.read_with(cx, |mw, _cx| {
- assert_eq!(mw.active_workspace_index(), 1);
assert_ne!(mw.workspace(), &workspace1);
})
.unwrap();
@@ -4851,6 +5013,63 @@ pub(crate) mod tests {
});
}
+ #[gpui::test]
+ async fn test_stale_stop_does_not_disable_follow_tail_during_regenerate(
+ cx: &mut TestAppContext,
+ ) {
+ init_test(cx);
+
+ let connection = StubAgentConnection::new();
+
+ let (conversation_view, cx) =
+ setup_conversation_view(StubAgentServer::new(connection.clone()), cx).await;
+ add_to_workspace(conversation_view.clone(), cx);
+
+ let message_editor = message_editor(&conversation_view, cx);
+ message_editor.update_in(cx, |editor, window, cx| {
+ editor.set_text("Original message to edit", window, cx);
+ });
+ active_thread(&conversation_view, cx)
+ .update_in(cx, |view, window, cx| view.send(window, cx));
+
+ cx.run_until_parked();
+
+ let user_message_editor = conversation_view.read_with(cx, |view, cx| {
+ view.active_thread()
+ .map(|active| &active.read(cx).entry_view_state)
+ .as_ref()
+ .unwrap()
+ .read(cx)
+ .entry(0)
+ .unwrap()
+ .message_editor()
+ .unwrap()
+ .clone()
+ });
+
+ cx.focus(&user_message_editor);
+ user_message_editor.update_in(cx, |editor, window, cx| {
+ editor.set_text("Edited message content", window, cx);
+ });
+
+ user_message_editor.update_in(cx, |_editor, window, cx| {
+ window.dispatch_action(Box::new(Chat), cx);
+ });
+
+ cx.run_until_parked();
+
+ conversation_view.read_with(cx, |view, cx| {
+ let active = view.active_thread().unwrap();
+ let active = active.read(cx);
+
+ assert_eq!(active.thread.read(cx).status(), ThreadStatus::Generating);
+ assert!(
+ active.list_state.is_following_tail(),
+ "stale stop events from the cancelled turn must not disable follow-tail for the new turn"
+ );
+ });
+ }
+
struct GeneratingThreadSetup {
conversation_view: Entity<ConversationView>,
thread: Entity<AcpThread>,
@@ -344,7 +344,8 @@ impl ThreadView {
) -> Self {
let id = thread.read(cx).session_id().clone();
- let placeholder = placeholder_text(agent_display_name.as_ref(), false);
+ let has_commands = !session_capabilities.read().available_commands().is_empty();
+ let placeholder = placeholder_text(agent_display_name.as_ref(), has_commands);
let history_subscription = history.as_ref().map(|h| {
cx.observe(h, |this, history, cx| {
@@ -541,31 +542,15 @@ impl ThreadView {
let thread_view = cx.entity().downgrade();
this.list_state
- .set_scroll_handler(move |event, _window, cx| {
+ .set_scroll_handler(move |_event, _window, cx| {
let list_state = list_state_for_scroll.clone();
let thread_view = thread_view.clone();
- let is_following_tail = event.is_following_tail;
// N.B. We must defer because the scroll handler is called while the
// ListState's RefCell is mutably borrowed. Reading logical_scroll_top()
// directly would panic from a double borrow.
cx.defer(move |cx| {
let scroll_top = list_state.logical_scroll_top();
let _ = thread_view.update(cx, |this, cx| {
- if !is_following_tail {
- let is_at_bottom = {
- let current_offset =
- list_state.scroll_px_offset_for_scrollbar().y.abs();
- let max_offset = list_state.max_offset_for_scrollbar().y;
- current_offset >= max_offset - px(1.0)
- };
-
- let is_generating =
- matches!(this.thread.read(cx).status(), ThreadStatus::Generating);
-
- if is_at_bottom && is_generating {
- list_state.set_follow_tail(true);
- }
- }
if let Some(thread) = this.as_native_thread(cx) {
thread.update(cx, |thread, _cx| {
thread.set_ui_scroll_position(Some(scroll_top));
@@ -832,13 +817,10 @@ impl ThreadView {
}
}
}));
- if self.parent_id.is_none() {
- self.suppress_merge_conflict_notification(cx);
- }
generation
}
- pub fn stop_turn(&mut self, generation: usize, cx: &mut Context<Self>) {
+ pub fn stop_turn(&mut self, generation: usize, _cx: &mut Context<Self>) {
if self.turn_fields.turn_generation != generation {
return;
}
@@ -849,25 +831,6 @@ impl ThreadView {
.map(|started| started.elapsed());
self.turn_fields.last_turn_tokens = self.turn_fields.turn_tokens.take();
self.turn_fields._turn_timer_task = None;
- if self.parent_id.is_none() {
- self.unsuppress_merge_conflict_notification(cx);
- }
- }
-
- fn suppress_merge_conflict_notification(&self, cx: &mut Context<Self>) {
- self.workspace
- .update(cx, |workspace, cx| {
- workspace.suppress_notification(&workspace::merge_conflict_notification_id(), cx);
- })
- .ok();
- }
-
- fn unsuppress_merge_conflict_notification(&self, cx: &mut Context<Self>) {
- self.workspace
- .update(cx, |workspace, _cx| {
- workspace.unsuppress(workspace::merge_conflict_notification_id());
- })
- .ok();
}
pub fn update_turn_tokens(&mut self, cx: &App) {
@@ -1077,7 +1040,7 @@ impl ThreadView {
})?;
let _ = this.update(cx, |this, cx| {
- this.list_state.set_follow_tail(true);
+ this.list_state.scroll_to_end();
cx.notify();
});
@@ -4978,6 +4941,105 @@ impl ThreadView {
cx.notify();
}
+ fn scroll_output_page_up(
+ &mut self,
+ _: &ScrollOutputPageUp,
+ _window: &mut Window,
+ cx: &mut Context<Self>,
+ ) {
+ let page_height = self.list_state.viewport_bounds().size.height;
+ self.list_state.scroll_by(-page_height * 0.9);
+ cx.notify();
+ }
+
+ fn scroll_output_page_down(
+ &mut self,
+ _: &ScrollOutputPageDown,
+ _window: &mut Window,
+ cx: &mut Context<Self>,
+ ) {
+ let page_height = self.list_state.viewport_bounds().size.height;
+ self.list_state.scroll_by(page_height * 0.9);
+ cx.notify();
+ }
+
+ fn scroll_output_line_up(
+ &mut self,
+ _: &ScrollOutputLineUp,
+ window: &mut Window,
+ cx: &mut Context<Self>,
+ ) {
+ self.list_state.scroll_by(-window.line_height() * 3.);
+ cx.notify();
+ }
+
+ fn scroll_output_line_down(
+ &mut self,
+ _: &ScrollOutputLineDown,
+ window: &mut Window,
+ cx: &mut Context<Self>,
+ ) {
+ self.list_state.scroll_by(window.line_height() * 3.);
+ cx.notify();
+ }
+
+ fn scroll_output_to_top(
+ &mut self,
+ _: &ScrollOutputToTop,
+ _window: &mut Window,
+ cx: &mut Context<Self>,
+ ) {
+ self.scroll_to_top(cx);
+ }
+
+ fn scroll_output_to_bottom(
+ &mut self,
+ _: &ScrollOutputToBottom,
+ _window: &mut Window,
+ cx: &mut Context<Self>,
+ ) {
+ self.scroll_to_end(cx);
+ }
+
+ fn scroll_output_to_previous_message(
+ &mut self,
+ _: &ScrollOutputToPreviousMessage,
+ _window: &mut Window,
+ cx: &mut Context<Self>,
+ ) {
+ let entries = self.thread.read(cx).entries();
+ let current_ix = self.list_state.logical_scroll_top().item_ix;
+ if let Some(target_ix) = (0..current_ix)
+ .rev()
+ .find(|&i| matches!(entries.get(i), Some(AgentThreadEntry::UserMessage(_))))
+ {
+ self.list_state.scroll_to(ListOffset {
+ item_ix: target_ix,
+ offset_in_item: px(0.),
+ });
+ cx.notify();
+ }
+ }
+
+ fn scroll_output_to_next_message(
+ &mut self,
+ _: &ScrollOutputToNextMessage,
+ _window: &mut Window,
+ cx: &mut Context<Self>,
+ ) {
+ let entries = self.thread.read(cx).entries();
+ let current_ix = self.list_state.logical_scroll_top().item_ix;
+ if let Some(target_ix) = (current_ix + 1..entries.len())
+ .find(|&i| matches!(entries.get(i), Some(AgentThreadEntry::UserMessage(_))))
+ {
+ self.list_state.scroll_to(ListOffset {
+ item_ix: target_ix,
+ offset_in_item: px(0.),
+ });
+ cx.notify();
+ }
+ }
+
pub fn open_thread_as_markdown(
&self,
workspace: Entity<Workspace>,
@@ -5207,9 +5269,12 @@ impl ThreadView {
match thinking_display {
ThinkingBlockDisplay::Auto => {
- if self.expanded_thinking_blocks.contains(&key) {
+ let is_open = self.expanded_thinking_blocks.contains(&key)
+ || self.user_toggled_thinking_blocks.contains(&key);
+
+ if is_open {
self.expanded_thinking_blocks.remove(&key);
- self.user_toggled_thinking_blocks.insert(key);
+ self.user_toggled_thinking_blocks.remove(&key);
} else {
self.expanded_thinking_blocks.insert(key);
self.user_toggled_thinking_blocks.insert(key);
@@ -7126,17 +7191,10 @@ impl ThreadView {
};
active_editor.update_in(cx, |editor, window, cx| {
- let singleton = editor
- .buffer()
- .read(cx)
- .read(cx)
- .as_singleton()
- .map(|(a, b, _)| (a, b));
- if let Some((excerpt_id, buffer_id)) = singleton
- && let Some(agent_buffer) = agent_location.buffer.upgrade()
- && agent_buffer.read(cx).remote_id() == buffer_id
+ let snapshot = editor.buffer().read(cx).snapshot(cx);
+ if snapshot.as_singleton().is_some()
+ && let Some(anchor) = snapshot.anchor_in_excerpt(agent_location.position)
{
- let anchor = editor::Anchor::in_buffer(excerpt_id, agent_location.position);
editor.change_selections(Default::default(), window, cx, |selections| {
selections.select_anchor_ranges([anchor..anchor]);
})
@@ -7332,9 +7390,8 @@ impl ThreadView {
.gap_2()
.map(|this| {
if card_layout {
- this.when(context_ix > 0, |this| {
- this.pt_2()
- .border_t_1()
+ this.p_2().when(context_ix > 0, |this| {
+ this.border_t_1()
.border_color(self.tool_card_border_color(cx))
})
} else {
@@ -8545,6 +8602,14 @@ impl Render for ThreadView {
.on_action(cx.listener(Self::handle_toggle_command_pattern))
.on_action(cx.listener(Self::open_permission_dropdown))
.on_action(cx.listener(Self::open_add_context_menu))
+ .on_action(cx.listener(Self::scroll_output_page_up))
+ .on_action(cx.listener(Self::scroll_output_page_down))
+ .on_action(cx.listener(Self::scroll_output_line_up))
+ .on_action(cx.listener(Self::scroll_output_line_down))
+ .on_action(cx.listener(Self::scroll_output_to_top))
+ .on_action(cx.listener(Self::scroll_output_to_bottom))
+ .on_action(cx.listener(Self::scroll_output_to_previous_message))
+ .on_action(cx.listener(Self::scroll_output_to_next_message))
.on_action(cx.listener(|this, _: &ToggleFastMode, _window, cx| {
this.toggle_fast_mode(cx);
}))
@@ -8732,7 +8797,7 @@ pub(crate) fn open_link(
.open_path(path, None, true, window, cx)
.detach_and_log_err(cx);
}
- MentionUri::PastedImage => {}
+ MentionUri::PastedImage { .. } => {}
MentionUri::Directory { abs_path } => {
let project = workspace.project();
let Some(entry_id) = project.update(cx, |project, cx| {
@@ -27,8 +27,8 @@ use editor::RowExt;
use editor::SelectionEffects;
use editor::scroll::ScrollOffset;
use editor::{
- Anchor, AnchorRangeExt, CodeActionProvider, Editor, EditorEvent, ExcerptId, HighlightKey,
- MultiBuffer, MultiBufferSnapshot, ToOffset as _, ToPoint,
+ Anchor, AnchorRangeExt, CodeActionProvider, Editor, EditorEvent, HighlightKey, MultiBuffer,
+ MultiBufferSnapshot, ToOffset as _, ToPoint,
actions::SelectAll,
display_map::{
BlockContext, BlockPlacement, BlockProperties, BlockStyle, CustomBlockId, EditorMargins,
@@ -443,15 +443,17 @@ impl InlineAssistant {
let newest_selection = newest_selection.unwrap();
let mut codegen_ranges = Vec::new();
- for (buffer, buffer_range, excerpt_id) in
- snapshot.ranges_to_buffer_ranges(selections.iter().map(|selection| {
- snapshot.anchor_before(selection.start)..snapshot.anchor_after(selection.end)
- }))
+ for (buffer, buffer_range, _) in selections
+ .iter()
+ .flat_map(|selection| snapshot.range_to_buffer_ranges(selection.start..selection.end))
{
- let anchor_range = Anchor::range_in_buffer(
- excerpt_id,
- buffer.anchor_before(buffer_range.start)..buffer.anchor_after(buffer_range.end),
- );
+ let (Some(start), Some(end)) = (
+ snapshot.anchor_in_buffer(buffer.anchor_before(buffer_range.start)),
+ snapshot.anchor_in_buffer(buffer.anchor_after(buffer_range.end)),
+ ) else {
+ continue;
+ };
+ let anchor_range = start..end;
codegen_ranges.push(anchor_range);
@@ -982,8 +984,7 @@ impl InlineAssistant {
match event {
EditorEvent::Edited { transaction_id } => {
let buffer = editor.read(cx).buffer().read(cx);
- let edited_ranges =
- buffer.edited_ranges_for_transaction::<MultiBufferOffset>(*transaction_id, cx);
+ let edited_ranges = buffer.edited_ranges_for_transaction(*transaction_id, cx);
let snapshot = buffer.snapshot(cx);
for assist_id in editor_assists.assist_ids.clone() {
@@ -1089,7 +1090,7 @@ impl InlineAssistant {
let multibuffer = editor.read(cx).buffer().read(cx);
let snapshot = multibuffer.snapshot(cx);
let ranges =
- snapshot.range_to_buffer_ranges(assist.range.start..=assist.range.end);
+ snapshot.range_to_buffer_ranges(assist.range.start..assist.range.end);
ranges
.first()
.and_then(|(buffer, _, _)| buffer.language())
@@ -1496,10 +1497,10 @@ impl InlineAssistant {
let mut new_blocks = Vec::new();
for (new_row, old_row_range) in deleted_row_ranges {
- let (_, start, _) = old_snapshot
+ let (_, start) = old_snapshot
.point_to_buffer_point(Point::new(*old_row_range.start(), 0))
.unwrap();
- let (_, end, _) = old_snapshot
+ let (_, end) = old_snapshot
.point_to_buffer_point(Point::new(
*old_row_range.end(),
old_snapshot.line_len(MultiBufferRow(*old_row_range.end())),
@@ -1530,7 +1531,7 @@ impl InlineAssistant {
editor.set_read_only(true);
editor.set_show_edit_predictions(Some(false), window, cx);
editor.highlight_rows::<DeletedLines>(
- Anchor::min()..Anchor::max(),
+ Anchor::Min..Anchor::Max,
cx.theme().status().deleted_background,
Default::default(),
cx,
@@ -1938,9 +1939,8 @@ impl CodeActionProvider for AssistantCodeActionProvider {
fn apply_code_action(
&self,
- buffer: Entity<Buffer>,
+ _buffer: Entity<Buffer>,
action: CodeAction,
- excerpt_id: ExcerptId,
_push_to_history: bool,
window: &mut Window,
cx: &mut App,
@@ -1970,31 +1970,8 @@ impl CodeActionProvider for AssistantCodeActionProvider {
let range = editor
.update(cx, |editor, cx| {
editor.buffer().update(cx, |multibuffer, cx| {
- let buffer = buffer.read(cx);
- let multibuffer_snapshot = multibuffer.read(cx);
-
- let old_context_range =
- multibuffer_snapshot.context_range_for_excerpt(excerpt_id)?;
- let mut new_context_range = old_context_range.clone();
- if action
- .range
- .start
- .cmp(&old_context_range.start, buffer)
- .is_lt()
- {
- new_context_range.start = action.range.start;
- }
- if action.range.end.cmp(&old_context_range.end, buffer).is_gt() {
- new_context_range.end = action.range.end;
- }
- drop(multibuffer_snapshot);
-
- if new_context_range != old_context_range {
- multibuffer.resize_excerpt(excerpt_id, new_context_range, cx);
- }
-
let multibuffer_snapshot = multibuffer.read(cx);
- multibuffer_snapshot.anchor_range_in_excerpt(excerpt_id, action.range)
+ multibuffer_snapshot.buffer_anchor_range_to_anchor_range(action.range)
})
})
.context("invalid range")?;
@@ -2048,7 +2025,7 @@ fn merge_ranges(ranges: &mut Vec<Range<Anchor>>, buffer: &MultiBufferSnapshot) {
pub mod evals {
use crate::InlineAssistant;
use agent::ThreadStore;
- use client::{Client, UserStore};
+ use client::{Client, RefreshLlmTokenListener, UserStore};
use editor::{Editor, MultiBuffer, MultiBufferOffset};
use eval_utils::{EvalOutput, NoProcessor};
use fs::FakeFs;
@@ -2114,7 +2091,8 @@ pub mod evals {
client::init(&client, cx);
workspace::init(app_state.clone(), cx);
let user_store = cx.new(|cx| UserStore::new(client.clone(), cx));
- language_model::init(user_store.clone(), client.clone(), cx);
+ language_model::init(cx);
+ RefreshLlmTokenListener::register(client.clone(), user_store.clone(), cx);
language_models::init(user_store, client.clone(), cx);
cx.set_global(inline_assistant);
@@ -6,7 +6,7 @@ use agent_servers::{AgentServer, AgentServerDelegate};
use anyhow::{Context as _, Result, anyhow};
use collections::{HashMap, HashSet};
use editor::{
- Anchor, Editor, EditorSnapshot, ExcerptId, FoldPlaceholder, ToOffset,
+ Anchor, Editor, EditorSnapshot, FoldPlaceholder, ToOffset,
display_map::{Crease, CreaseId, CreaseMetadata, FoldId},
scroll::Autoscroll,
};
@@ -154,7 +154,7 @@ impl MentionSet {
MentionUri::Selection { abs_path: None, .. } => Task::ready(Err(anyhow!(
"Untitled buffer selection mentions are not supported for paste"
))),
- MentionUri::PastedImage
+ MentionUri::PastedImage { .. }
| MentionUri::TerminalSelection { .. }
| MentionUri::MergeConflict { .. } => {
Task::ready(Err(anyhow!("Unsupported mention URI type for paste")))
@@ -204,10 +204,9 @@ impl MentionSet {
};
let snapshot = editor.update(cx, |editor, cx| editor.snapshot(window, cx));
- let Some(start_anchor) = snapshot.buffer_snapshot().as_singleton_anchor(start) else {
+ let Some(start_anchor) = snapshot.buffer_snapshot().anchor_in_excerpt(start) else {
return Task::ready(());
};
- let excerpt_id = start_anchor.excerpt_id;
let end_anchor = snapshot.buffer_snapshot().anchor_before(
start_anchor.to_offset(&snapshot.buffer_snapshot()) + content_len + 1usize,
);
@@ -234,7 +233,6 @@ impl MentionSet {
})
.shared();
insert_crease_for_mention(
- excerpt_id,
start,
content_len,
mention_uri.name().into(),
@@ -249,7 +247,6 @@ impl MentionSet {
)
} else {
insert_crease_for_mention(
- excerpt_id,
start,
content_len,
crease_text,
@@ -286,7 +283,7 @@ impl MentionSet {
include_errors,
include_warnings,
} => self.confirm_mention_for_diagnostics(include_errors, include_warnings, cx),
- MentionUri::PastedImage => {
+ MentionUri::PastedImage { .. } => {
debug_panic!("pasted image URI should not be included in completions");
Task::ready(Err(anyhow!(
"pasted imaged URI should not be included in completions"
@@ -468,7 +465,7 @@ impl MentionSet {
};
let snapshot = editor.read(cx).buffer().read(cx).snapshot(cx);
- let Some(start) = snapshot.as_singleton_anchor(source_range.start) else {
+ let Some(start) = snapshot.anchor_in_excerpt(source_range.start) else {
return;
};
@@ -742,22 +739,22 @@ pub(crate) async fn insert_images_as_context(
return;
}
- let replacement_text = MentionUri::PastedImage.as_link().to_string();
-
for (image, name) in images {
- let Some((excerpt_id, text_anchor, multibuffer_anchor)) = editor
+ let mention_uri = MentionUri::PastedImage {
+ name: name.to_string(),
+ };
+ let replacement_text = mention_uri.as_link().to_string();
+ let Some((text_anchor, multibuffer_anchor)) = editor
.update_in(cx, |editor, window, cx| {
let snapshot = editor.snapshot(window, cx);
- let (excerpt_id, _, buffer_snapshot) =
- snapshot.buffer_snapshot().as_singleton().unwrap();
-
- let cursor_anchor = editor.selections.newest_anchor().start.text_anchor;
- let text_anchor = cursor_anchor.bias_left(&buffer_snapshot);
- let multibuffer_anchor = snapshot
+ let (cursor_anchor, buffer_snapshot) = snapshot
.buffer_snapshot()
- .anchor_in_excerpt(excerpt_id, text_anchor);
+ .anchor_to_buffer_anchor(editor.selections.newest_anchor().start)
+ .unwrap();
+ let text_anchor = cursor_anchor.bias_left(buffer_snapshot);
+ let multibuffer_anchor = snapshot.buffer_snapshot().anchor_in_excerpt(text_anchor);
editor.insert(&format!("{replacement_text} "), window, cx);
- (excerpt_id, text_anchor, multibuffer_anchor)
+ (text_anchor, multibuffer_anchor)
})
.ok()
else {
@@ -775,7 +772,6 @@ pub(crate) async fn insert_images_as_context(
let image = Arc::new(image);
let Ok(Some((crease_id, tx))) = cx.update(|window, cx| {
insert_crease_for_mention(
- excerpt_id,
text_anchor,
content_len,
name.clone(),
@@ -810,7 +806,13 @@ pub(crate) async fn insert_images_as_context(
.shared();
mention_set.update(cx, |mention_set, _cx| {
- mention_set.insert_mention(crease_id, MentionUri::PastedImage, task.clone())
+ mention_set.insert_mention(
+ crease_id,
+ MentionUri::PastedImage {
+ name: name.to_string(),
+ },
+ task.clone(),
+ )
});
if task
@@ -879,7 +881,7 @@ pub(crate) fn paste_images_as_context(
Some(window.spawn(cx, async move |mut cx| {
use itertools::Itertools;
- let default_name: SharedString = MentionUri::PastedImage.name().into();
+ let default_name: SharedString = "Image".into();
let (mut images, paths): (Vec<(gpui::Image, SharedString)>, Vec<_>) = clipboard
.into_entries()
.filter_map(|entry| match entry {
@@ -909,7 +911,6 @@ pub(crate) fn paste_images_as_context(
}
pub(crate) fn insert_crease_for_mention(
- excerpt_id: ExcerptId,
anchor: text::Anchor,
content_len: usize,
crease_label: SharedString,
@@ -927,7 +928,7 @@ pub(crate) fn insert_crease_for_mention(
let crease_id = editor.update(cx, |editor, cx| {
let snapshot = editor.buffer().read(cx).snapshot(cx);
- let start = snapshot.anchor_in_excerpt(excerpt_id, anchor)?;
+ let start = snapshot.anchor_in_excerpt(anchor)?;
let start = start.bias_right(&snapshot);
let end = snapshot.anchor_before(start.to_offset(&snapshot) + content_len);
@@ -203,12 +203,10 @@ fn insert_mention_for_project_path(
MentionInsertPosition::AtCursor => editor.update(cx, |editor, cx| {
let buffer = editor.buffer().read(cx);
let snapshot = buffer.snapshot(cx);
- let (_, _, buffer_snapshot) = snapshot.as_singleton()?;
- let text_anchor = editor
- .selections
- .newest_anchor()
- .start
- .text_anchor
+ let buffer_snapshot = snapshot.as_singleton()?;
+ let text_anchor = snapshot
+ .anchor_to_buffer_anchor(editor.selections.newest_anchor().start)?
+ .0
.bias_left(&buffer_snapshot);
editor.insert(&mention_text, window, cx);
@@ -224,7 +222,7 @@ fn insert_mention_for_project_path(
editor.update(cx, |editor, cx| {
editor.edit(
[(
- multi_buffer::Anchor::max()..multi_buffer::Anchor::max(),
+ multi_buffer::Anchor::Max..multi_buffer::Anchor::Max,
new_text,
)],
cx,
@@ -263,7 +261,7 @@ async fn resolve_pasted_context_items(
) -> (Vec<ResolvedPastedContextItem>, Vec<Entity<Worktree>>) {
let mut items = Vec::new();
let mut added_worktrees = Vec::new();
- let default_image_name: SharedString = MentionUri::PastedImage.name().into();
+ let default_image_name: SharedString = "Image".into();
for entry in entries {
match entry {
@@ -603,7 +601,7 @@ impl MessageEditor {
COMMAND_HINT_INLAY_ID,
hint_pos,
&InlayHint {
- position: hint_pos.text_anchor,
+ position: snapshot.anchor_to_buffer_anchor(hint_pos)?.0,
label: InlayHintLabel::String(hint),
kind: Some(InlayHintKind::Parameter),
padding_left: false,
@@ -640,12 +638,11 @@ impl MessageEditor {
let start = self.editor.update(cx, |editor, cx| {
editor.set_text(content, window, cx);
- editor
- .buffer()
- .read(cx)
- .snapshot(cx)
- .anchor_before(Point::zero())
- .text_anchor
+ let snapshot = editor.buffer().read(cx).snapshot(cx);
+ snapshot
+ .anchor_to_buffer_anchor(snapshot.anchor_before(Point::zero()))
+ .unwrap()
+ .0
});
let supports_images = self.session_capabilities.read().supports_images();
@@ -815,7 +812,9 @@ impl MessageEditor {
)
.uri(match uri {
MentionUri::File { .. } => Some(uri.to_uri().to_string()),
- MentionUri::PastedImage => None,
+ MentionUri::PastedImage { .. } => {
+ Some(uri.to_uri().to_string())
+ }
other => {
debug_panic!(
"unexpected mention uri for image: {:?}",
@@ -999,13 +998,10 @@ impl MessageEditor {
if should_insert_creases && let Some(selections) = editor_clipboard_selections {
cx.stop_propagation();
- let insertion_target = self
- .editor
- .read(cx)
- .selections
- .newest_anchor()
- .start
- .text_anchor;
+ let snapshot = self.editor.read(cx).buffer().read(cx).snapshot(cx);
+ let (insertion_target, _) = snapshot
+ .anchor_to_buffer_anchor(self.editor.read(cx).selections.newest_anchor().start)
+ .unwrap();
let project = workspace.read(cx).project().clone();
for selection in selections {
@@ -1021,21 +1017,19 @@ impl MessageEditor {
};
let mention_text = mention_uri.as_link().to_string();
- let (excerpt_id, text_anchor, content_len) =
- self.editor.update(cx, |editor, cx| {
- let buffer = editor.buffer().read(cx);
- let snapshot = buffer.snapshot(cx);
- let (excerpt_id, _, buffer_snapshot) = snapshot.as_singleton().unwrap();
- let text_anchor = insertion_target.bias_left(&buffer_snapshot);
+ let (text_anchor, content_len) = self.editor.update(cx, |editor, cx| {
+ let buffer = editor.buffer().read(cx);
+ let snapshot = buffer.snapshot(cx);
+ let buffer_snapshot = snapshot.as_singleton().unwrap();
+ let text_anchor = insertion_target.bias_left(&buffer_snapshot);
- editor.insert(&mention_text, window, cx);
- editor.insert(" ", window, cx);
+ editor.insert(&mention_text, window, cx);
+ editor.insert(" ", window, cx);
- (excerpt_id, text_anchor, mention_text.len())
- });
+ (text_anchor, mention_text.len())
+ });
let Some((crease_id, tx)) = insert_crease_for_mention(
- excerpt_id,
text_anchor,
content_len,
crease_text.into(),
@@ -1145,8 +1139,7 @@ impl MessageEditor {
for (anchor, content_len, mention_uri) in all_mentions {
let Some((crease_id, tx)) = insert_crease_for_mention(
- anchor.excerpt_id,
- anchor.text_anchor,
+ snapshot.anchor_to_buffer_anchor(anchor).unwrap().0,
content_len,
mention_uri.name().into(),
mention_uri.icon_path(cx),
@@ -1339,25 +1332,23 @@ impl MessageEditor {
};
let mention_text = mention_uri.as_link().to_string();
- let (excerpt_id, text_anchor, content_len) = editor.update(cx, |editor, cx| {
+ let (text_anchor, content_len) = editor.update(cx, |editor, cx| {
let buffer = editor.buffer().read(cx);
let snapshot = buffer.snapshot(cx);
- let (excerpt_id, _, buffer_snapshot) = snapshot.as_singleton().unwrap();
- let text_anchor = editor
- .selections
- .newest_anchor()
- .start
- .text_anchor
+ let buffer_snapshot = snapshot.as_singleton().unwrap();
+ let text_anchor = snapshot
+ .anchor_to_buffer_anchor(editor.selections.newest_anchor().start)
+ .unwrap()
+ .0
.bias_left(&buffer_snapshot);
editor.insert(&mention_text, window, cx);
editor.insert(" ", window, cx);
- (excerpt_id, text_anchor, mention_text.len())
+ (text_anchor, mention_text.len())
});
let Some((crease_id, tx)) = insert_crease_for_mention(
- excerpt_id,
text_anchor,
content_len,
mention_uri.name().into(),
@@ -1649,7 +1640,9 @@ impl MessageEditor {
let mention_uri = if let Some(uri) = uri {
MentionUri::parse(&uri, path_style)
} else {
- Ok(MentionUri::PastedImage)
+ Ok(MentionUri::PastedImage {
+ name: "Image".to_string(),
+ })
};
let Some(mention_uri) = mention_uri.log_err() else {
continue;
@@ -1700,8 +1693,7 @@ impl MessageEditor {
let adjusted_start = insertion_start + range.start;
let anchor = snapshot.anchor_before(MultiBufferOffset(adjusted_start));
let Some((crease_id, tx)) = insert_crease_for_mention(
- anchor.excerpt_id,
- anchor.text_anchor,
+ snapshot.anchor_to_buffer_anchor(anchor).unwrap().0,
range.end - range.start,
mention_uri.name().into(),
mention_uri.icon_path(cx),
@@ -2077,23 +2069,13 @@ mod tests {
cx.run_until_parked();
- let excerpt_id = editor.update(cx, |editor, cx| {
- editor
- .buffer()
- .read(cx)
- .excerpt_ids()
- .into_iter()
- .next()
- .unwrap()
- });
let completions = editor.update_in(cx, |editor, window, cx| {
editor.set_text("Hello @file ", window, cx);
let buffer = editor.buffer().read(cx).as_singleton().unwrap();
let completion_provider = editor.completion_provider().unwrap();
completion_provider.completions(
- excerpt_id,
&buffer,
- text::Anchor::MAX,
+ text::Anchor::max_for_buffer(buffer.read(cx).remote_id()),
CompletionContext {
trigger_kind: CompletionTriggerKind::TRIGGER_CHARACTER,
trigger_character: Some("@".into()),
@@ -2114,7 +2096,7 @@ mod tests {
editor.update_in(cx, |editor, window, cx| {
let snapshot = editor.buffer().read(cx).snapshot(cx);
let range = snapshot
- .anchor_range_in_excerpt(excerpt_id, completion.replace_range)
+ .buffer_anchor_range_to_anchor_range(completion.replace_range)
.unwrap();
editor.edit([(range, completion.new_text)], cx);
(completion.confirm.unwrap())(CompletionIntent::Complete, window, cx);
@@ -4096,6 +4078,11 @@ mod tests {
&mut cx,
);
+ let image_name = temporary_image_path
+ .file_name()
+ .and_then(|n| n.to_str())
+ .unwrap_or("Image")
+ .to_string();
std::fs::remove_file(&temporary_image_path).expect("remove temp png");
let expected_file_uri = MentionUri::File {
@@ -4103,12 +4090,16 @@ mod tests {
}
.to_uri()
.to_string();
- let expected_image_uri = MentionUri::PastedImage.to_uri().to_string();
+ let expected_image_uri = MentionUri::PastedImage {
+ name: image_name.clone(),
+ }
+ .to_uri()
+ .to_string();
editor.update(&mut cx, |editor, cx| {
assert_eq!(
editor.text(cx),
- format!("[@Image]({expected_image_uri}) [@file.txt]({expected_file_uri}) ")
+ format!("[@{image_name}]({expected_image_uri}) [@file.txt]({expected_file_uri}) ")
);
});
@@ -4116,7 +4107,7 @@ mod tests {
assert_eq!(contents.len(), 2);
assert!(contents.iter().any(|(uri, mention)| {
- *uri == MentionUri::PastedImage && matches!(mention, Mention::Image(_))
+ matches!(uri, MentionUri::PastedImage { .. }) && matches!(mention, Mention::Image(_))
}));
assert!(contents.iter().any(|(uri, mention)| {
*uri == MentionUri::File {
@@ -90,6 +90,7 @@ impl ProfileSelector {
if let Some((next_profile_id, _)) = profiles.get_index(next_index) {
self.provider.set_profile(next_profile_id.clone(), cx);
+ cx.notify();
}
}
@@ -17,7 +17,7 @@ use ui::{
prelude::*,
};
use util::ResultExt;
-use workspace::{ModalView, MultiWorkspace, Workspace};
+use workspace::{ModalView, MultiWorkspace, PathList, Workspace};
use crate::{
Agent, AgentPanel,
@@ -500,6 +500,7 @@ fn collect_importable_threads(
updated_at: session.updated_at.unwrap_or_else(|| Utc::now()),
created_at: session.created_at,
folder_paths,
+ main_worktree_paths: PathList::default(),
archived: true,
});
}
@@ -66,6 +66,7 @@ fn migrate_thread_metadata(cx: &mut App) {
updated_at: entry.updated_at,
created_at: entry.created_at,
folder_paths: entry.folder_paths,
+ main_worktree_paths: PathList::default(),
archived: true,
})
})
@@ -126,6 +127,7 @@ pub struct ThreadMetadata {
pub updated_at: DateTime<Utc>,
pub created_at: Option<DateTime<Utc>>,
pub folder_paths: PathList,
+ pub main_worktree_paths: PathList,
pub archived: bool,
}
@@ -149,6 +151,7 @@ pub struct ThreadMetadataStore {
db: ThreadMetadataDb,
threads: HashMap<acp::SessionId, ThreadMetadata>,
threads_by_paths: HashMap<PathList, HashSet<acp::SessionId>>,
+ threads_by_main_paths: HashMap<PathList, HashSet<acp::SessionId>>,
reload_task: Option<Shared<Task<()>>>,
session_subscriptions: HashMap<acp::SessionId, Subscription>,
pending_thread_ops_tx: smol::channel::Sender<DbOperation>,
@@ -238,6 +241,21 @@ impl ThreadMetadataStore {
.filter(|s| !s.archived)
}
+ /// Returns threads whose `main_worktree_paths` matches the given path list,
+ /// excluding archived threads. This finds threads that were opened in a
+ /// linked worktree but are associated with the given main worktree.
+ pub fn entries_for_main_worktree_path(
+ &self,
+ path_list: &PathList,
+ ) -> impl Iterator<Item = &ThreadMetadata> + '_ {
+ self.threads_by_main_paths
+ .get(path_list)
+ .into_iter()
+ .flatten()
+ .filter_map(|s| self.threads.get(s))
+ .filter(|s| !s.archived)
+ }
+
fn reload(&mut self, cx: &mut Context<Self>) -> Shared<Task<()>> {
let db = self.db.clone();
self.reload_task.take();
@@ -254,12 +272,19 @@ impl ThreadMetadataStore {
this.update(cx, |this, cx| {
this.threads.clear();
this.threads_by_paths.clear();
+ this.threads_by_main_paths.clear();
for row in rows {
this.threads_by_paths
.entry(row.folder_paths.clone())
.or_default()
.insert(row.session_id.clone());
+ if !row.main_worktree_paths.is_empty() {
+ this.threads_by_main_paths
+ .entry(row.main_worktree_paths.clone())
+ .or_default()
+ .insert(row.session_id.clone());
+ }
this.threads.insert(row.session_id.clone(), row);
}
@@ -298,12 +323,22 @@ impl ThreadMetadataStore {
}
fn save_internal(&mut self, metadata: ThreadMetadata) {
- // If the folder paths have changed, we need to clear the old entry
- if let Some(thread) = self.threads.get(&metadata.session_id)
- && thread.folder_paths != metadata.folder_paths
- && let Some(session_ids) = self.threads_by_paths.get_mut(&thread.folder_paths)
- {
- session_ids.remove(&metadata.session_id);
+ if let Some(thread) = self.threads.get(&metadata.session_id) {
+ if thread.folder_paths != metadata.folder_paths {
+ if let Some(session_ids) = self.threads_by_paths.get_mut(&thread.folder_paths) {
+ session_ids.remove(&metadata.session_id);
+ }
+ }
+ if thread.main_worktree_paths != metadata.main_worktree_paths
+ && !thread.main_worktree_paths.is_empty()
+ {
+ if let Some(session_ids) = self
+ .threads_by_main_paths
+ .get_mut(&thread.main_worktree_paths)
+ {
+ session_ids.remove(&metadata.session_id);
+ }
+ }
}
self.threads
@@ -314,6 +349,13 @@ impl ThreadMetadataStore {
.or_default()
.insert(metadata.session_id.clone());
+ if !metadata.main_worktree_paths.is_empty() {
+ self.threads_by_main_paths
+ .entry(metadata.main_worktree_paths.clone())
+ .or_default()
+ .insert(metadata.session_id.clone());
+ }
+
self.pending_thread_ops_tx
.try_send(DbOperation::Upsert(metadata))
.log_err();
@@ -370,10 +412,18 @@ impl ThreadMetadataStore {
return;
}
- if let Some(thread) = self.threads.get(&session_id)
- && let Some(session_ids) = self.threads_by_paths.get_mut(&thread.folder_paths)
- {
- session_ids.remove(&session_id);
+ if let Some(thread) = self.threads.get(&session_id) {
+ if let Some(session_ids) = self.threads_by_paths.get_mut(&thread.folder_paths) {
+ session_ids.remove(&session_id);
+ }
+ if !thread.main_worktree_paths.is_empty() {
+ if let Some(session_ids) = self
+ .threads_by_main_paths
+ .get_mut(&thread.main_worktree_paths)
+ {
+ session_ids.remove(&session_id);
+ }
+ }
}
self.threads.remove(&session_id);
self.pending_thread_ops_tx
@@ -397,14 +447,9 @@ impl ThreadMetadataStore {
let weak_store = weak_store.clone();
move |thread, cx| {
weak_store
- .update(cx, |store, cx| {
+ .update(cx, |store, _cx| {
let session_id = thread.session_id().clone();
store.session_subscriptions.remove(&session_id);
- if thread.entries().is_empty() {
- // Empty threads can be unloaded without ever being
- // durably persisted by the underlying agent.
- store.delete(session_id, cx);
- }
})
.ok();
}
@@ -449,6 +494,7 @@ impl ThreadMetadataStore {
db,
threads: HashMap::default(),
threads_by_paths: HashMap::default(),
+ threads_by_main_paths: HashMap::default(),
reload_task: None,
session_subscriptions: HashMap::default(),
pending_thread_ops_tx: tx,
@@ -494,6 +540,10 @@ impl ThreadMetadataStore {
| AcpThreadEvent::Refusal
| AcpThreadEvent::WorkingDirectoriesUpdated => {
let thread_ref = thread.read(cx);
+ if thread_ref.entries().is_empty() {
+ return;
+ }
+
let existing_thread = self.threads.get(thread_ref.session_id());
let session_id = thread_ref.session_id().clone();
let title = thread_ref
@@ -517,6 +567,20 @@ impl ThreadMetadataStore {
PathList::new(&paths)
};
+ let main_worktree_paths = {
+ let project = thread_ref.project().read(cx);
+ let mut main_paths: Vec<Arc<Path>> = Vec::new();
+ for repo in project.repositories(cx).values() {
+ let snapshot = repo.read(cx).snapshot();
+ if snapshot.is_linked_worktree() {
+ main_paths.push(snapshot.original_repo_abs_path.clone());
+ }
+ }
+ main_paths.sort();
+ main_paths.dedup();
+ PathList::new(&main_paths)
+ };
+
// Threads without a folder path (e.g. started in an empty
// window) are archived by default so they don't get lost,
// because they won't show up in the sidebar. Users can reload
@@ -532,6 +596,7 @@ impl ThreadMetadataStore {
created_at: Some(created_at),
updated_at,
folder_paths,
+ main_worktree_paths,
archived,
};
@@ -567,6 +632,8 @@ impl Domain for ThreadMetadataDb {
) STRICT;
),
sql!(ALTER TABLE sidebar_threads ADD COLUMN archived INTEGER DEFAULT 0),
+ sql!(ALTER TABLE sidebar_threads ADD COLUMN main_worktree_paths TEXT),
+ sql!(ALTER TABLE sidebar_threads ADD COLUMN main_worktree_paths_order TEXT),
];
}
@@ -583,7 +650,7 @@ impl ThreadMetadataDb {
/// List all sidebar thread metadata, ordered by updated_at descending.
pub fn list(&self) -> anyhow::Result<Vec<ThreadMetadata>> {
self.select::<ThreadMetadata>(
- "SELECT session_id, agent_id, title, updated_at, created_at, folder_paths, folder_paths_order, archived \
+ "SELECT session_id, agent_id, title, updated_at, created_at, folder_paths, folder_paths_order, archived, main_worktree_paths, main_worktree_paths_order \
FROM sidebar_threads \
ORDER BY updated_at DESC"
)?()
@@ -606,11 +673,18 @@ impl ThreadMetadataDb {
} else {
(Some(serialized.paths), Some(serialized.order))
};
+ let main_serialized = row.main_worktree_paths.serialize();
+ let (main_worktree_paths, main_worktree_paths_order) = if row.main_worktree_paths.is_empty()
+ {
+ (None, None)
+ } else {
+ (Some(main_serialized.paths), Some(main_serialized.order))
+ };
let archived = row.archived;
self.write(move |conn| {
- let sql = "INSERT INTO sidebar_threads(session_id, agent_id, title, updated_at, created_at, folder_paths, folder_paths_order, archived) \
- VALUES (?1, ?2, ?3, ?4, ?5, ?6, ?7, ?8) \
+ let sql = "INSERT INTO sidebar_threads(session_id, agent_id, title, updated_at, created_at, folder_paths, folder_paths_order, archived, main_worktree_paths, main_worktree_paths_order) \
+ VALUES (?1, ?2, ?3, ?4, ?5, ?6, ?7, ?8, ?9, ?10) \
ON CONFLICT(session_id) DO UPDATE SET \
agent_id = excluded.agent_id, \
title = excluded.title, \
@@ -618,7 +692,9 @@ impl ThreadMetadataDb {
created_at = excluded.created_at, \
folder_paths = excluded.folder_paths, \
folder_paths_order = excluded.folder_paths_order, \
- archived = excluded.archived";
+ archived = excluded.archived, \
+ main_worktree_paths = excluded.main_worktree_paths, \
+ main_worktree_paths_order = excluded.main_worktree_paths_order";
let mut stmt = Statement::prepare(conn, sql)?;
let mut i = stmt.bind(&id, 1)?;
i = stmt.bind(&agent_id, i)?;
@@ -627,7 +703,9 @@ impl ThreadMetadataDb {
i = stmt.bind(&created_at, i)?;
i = stmt.bind(&folder_paths, i)?;
i = stmt.bind(&folder_paths_order, i)?;
- stmt.bind(&archived, i)?;
+ i = stmt.bind(&archived, i)?;
+ i = stmt.bind(&main_worktree_paths, i)?;
+ stmt.bind(&main_worktree_paths_order, i)?;
stmt.exec()
})
.await
@@ -657,6 +735,10 @@ impl Column for ThreadMetadata {
let (folder_paths_order_str, next): (Option<String>, i32) =
Column::column(statement, next)?;
let (archived, next): (bool, i32) = Column::column(statement, next)?;
+ let (main_worktree_paths_str, next): (Option<String>, i32) =
+ Column::column(statement, next)?;
+ let (main_worktree_paths_order_str, next): (Option<String>, i32) =
+ Column::column(statement, next)?;
let agent_id = agent_id
.map(|id| AgentId::new(id))
@@ -678,6 +760,15 @@ impl Column for ThreadMetadata {
})
.unwrap_or_default();
+ let main_worktree_paths = main_worktree_paths_str
+ .map(|paths| {
+ PathList::deserialize(&util::path_list::SerializedPathList {
+ paths,
+ order: main_worktree_paths_order_str.unwrap_or_default(),
+ })
+ })
+ .unwrap_or_default();
+
Ok((
ThreadMetadata {
session_id: acp::SessionId::new(id),
@@ -686,6 +777,7 @@ impl Column for ThreadMetadata {
updated_at,
created_at,
folder_paths,
+ main_worktree_paths,
archived,
},
next,
@@ -742,6 +834,7 @@ mod tests {
updated_at,
created_at: Some(updated_at),
folder_paths,
+ main_worktree_paths: PathList::default(),
}
}
@@ -957,6 +1050,7 @@ mod tests {
updated_at: now - chrono::Duration::seconds(10),
created_at: Some(now - chrono::Duration::seconds(10)),
folder_paths: project_a_paths.clone(),
+ main_worktree_paths: PathList::default(),
archived: false,
};
@@ -1066,6 +1160,7 @@ mod tests {
updated_at: existing_updated_at,
created_at: Some(existing_updated_at),
folder_paths: project_paths.clone(),
+ main_worktree_paths: PathList::default(),
archived: false,
};
@@ -1197,7 +1292,7 @@ mod tests {
}
#[gpui::test]
- async fn test_empty_thread_metadata_deleted_when_thread_released(cx: &mut TestAppContext) {
+ async fn test_empty_thread_events_do_not_create_metadata(cx: &mut TestAppContext) {
init_test(cx);
let fs = FakeFs::new(cx.executor());
@@ -1227,11 +1322,16 @@ mod tests {
.entry_ids()
.collect::<Vec<_>>()
});
- assert_eq!(metadata_ids, vec![session_id]);
+ assert!(
+ metadata_ids.is_empty(),
+ "expected empty draft thread title updates to be ignored"
+ );
- drop(thread);
- cx.update(|_| {});
- cx.run_until_parked();
+ cx.update(|cx| {
+ thread.update(cx, |thread, cx| {
+ thread.push_user_content_block(None, "Hello".into(), cx);
+ });
+ });
cx.run_until_parked();
let metadata_ids = cx.update(|cx| {
@@ -1240,10 +1340,7 @@ mod tests {
.entry_ids()
.collect::<Vec<_>>()
});
- assert!(
- metadata_ids.is_empty(),
- "expected empty draft thread metadata to be deleted on release"
- );
+ assert_eq!(metadata_ids, vec![session_id]);
}
#[gpui::test]
@@ -1318,6 +1415,7 @@ mod tests {
cx.update(|cx| {
thread_without_worktree.update(cx, |thread, cx| {
+ thread.push_user_content_block(None, "content".into(), cx);
thread.set_title("No Project Thread".into(), cx).detach();
});
});
@@ -1338,6 +1436,7 @@ mod tests {
cx.update(|cx| {
thread_with_worktree.update(cx, |thread, cx| {
+ thread.push_user_content_block(None, "content".into(), cx);
thread.set_title("Project Thread".into(), cx).detach();
});
});
@@ -1393,6 +1492,7 @@ mod tests {
// Set a title on the regular thread to trigger a save via handle_thread_update.
cx.update(|cx| {
regular_thread.update(cx, |thread, cx| {
+ thread.push_user_content_block(None, "content".into(), cx);
thread.set_title("Regular Thread".into(), cx).detach();
});
});
@@ -91,14 +91,16 @@ impl TimeBucket {
}
fn fuzzy_match_positions(query: &str, text: &str) -> Option<Vec<usize>> {
- let query = query.to_lowercase();
- let text_lower = text.to_lowercase();
let mut positions = Vec::new();
let mut query_chars = query.chars().peekable();
- for (i, c) in text_lower.chars().enumerate() {
- if query_chars.peek() == Some(&c) {
- positions.push(i);
- query_chars.next();
+ for (byte_idx, candidate_char) in text.char_indices() {
+ if let Some(&query_char) = query_chars.peek() {
+ if candidate_char.eq_ignore_ascii_case(&query_char) {
+ positions.push(byte_idx);
+ query_chars.next();
+ }
+ } else {
+ break;
}
}
if query_chars.peek().is_none() {
@@ -216,6 +218,13 @@ impl ThreadsArchiveView {
handle.focus(window, cx);
}
+ pub fn is_filter_editor_focused(&self, window: &Window, cx: &App) -> bool {
+ self.filter_editor
+ .read(cx)
+ .focus_handle(cx)
+ .is_focused(window)
+ }
+
fn update_items(&mut self, cx: &mut Context<Self>) {
let sessions = ThreadMetadataStore::global(cx)
.read(cx)
@@ -344,7 +353,6 @@ impl ThreadsArchiveView {
.map(|mw| {
mw.read(cx)
.workspaces()
- .iter()
.filter_map(|ws| ws.read(cx).database_id())
.collect()
})
@@ -1283,3 +1291,59 @@ impl PickerDelegate for ProjectPickerDelegate {
)
}
}
+
+#[cfg(test)]
+mod tests {
+ use super::*;
+
+ #[test]
+ fn test_fuzzy_match_positions_returns_byte_indices() {
+ // "🔥abc" — the fire emoji is 4 bytes, so 'a' starts at byte 4, 'b' at 5, 'c' at 6.
+ let text = "🔥abc";
+ let positions = fuzzy_match_positions("ab", text).expect("should match");
+ assert_eq!(positions, vec![4, 5]);
+
+ // Verify positions are valid char boundaries (this is the assertion that
+ // panicked before the fix).
+ for &pos in &positions {
+ assert!(
+ text.is_char_boundary(pos),
+ "position {pos} is not a valid UTF-8 boundary in {text:?}"
+ );
+ }
+ }
+
+ #[test]
+ fn test_fuzzy_match_positions_ascii_still_works() {
+ let positions = fuzzy_match_positions("he", "hello").expect("should match");
+ assert_eq!(positions, vec![0, 1]);
+ }
+
+ #[test]
+ fn test_fuzzy_match_positions_case_insensitive() {
+ let positions = fuzzy_match_positions("HE", "hello").expect("should match");
+ assert_eq!(positions, vec![0, 1]);
+ }
+
+ #[test]
+ fn test_fuzzy_match_positions_no_match() {
+ assert!(fuzzy_match_positions("xyz", "hello").is_none());
+ }
+
+ #[test]
+ fn test_fuzzy_match_positions_multi_byte_interior() {
+        // "café" — 'c', 'a', and 'f' are 1 byte each; 'é' is 2 bytes (0xC3 0xA9).
+        let text = "café";
+        let positions = fuzzy_match_positions("fé", text).expect("should match");
+        // Byte layout: c=0, a=1, f=2, é=3..=4, so matching "fé" yields
+        // byte positions 2 and 3 — both valid char boundaries even though
+        // 'é' itself spans two bytes.
+ assert_eq!(positions, vec![2, 3]);
+ for &pos in &positions {
+ assert!(
+ text.is_char_boundary(pos),
+ "position {pos} is not a valid UTF-8 boundary in {text:?}"
+ );
+ }
+ }
+}
@@ -184,7 +184,7 @@ fn open_mention_uri(
MentionUri::Fetch { url } => {
cx.open_url(url.as_str());
}
- MentionUri::PastedImage
+ MentionUri::PastedImage { .. }
| MentionUri::Selection { abs_path: None, .. }
| MentionUri::Diagnostics { .. }
| MentionUri::TerminalSelection { .. }
@@ -171,9 +171,9 @@ impl sum_tree::Item for PendingHunk {
impl sum_tree::Summary for DiffHunkSummary {
type Context<'a> = &'a text::BufferSnapshot;
- fn zero(_cx: Self::Context<'_>) -> Self {
+ fn zero(buffer: &text::BufferSnapshot) -> Self {
DiffHunkSummary {
- buffer_range: Anchor::MIN..Anchor::MIN,
+ buffer_range: Anchor::min_min_range_for_buffer(buffer.remote_id()),
diff_base_byte_range: 0..0,
added_rows: 0,
removed_rows: 0,
@@ -248,6 +248,10 @@ impl BufferDiffSnapshot {
buffer_diff.update(cx, |buffer_diff, cx| buffer_diff.snapshot(cx))
}
+ pub fn buffer_id(&self) -> BufferId {
+ self.inner.buffer_snapshot.remote_id()
+ }
+
pub fn is_empty(&self) -> bool {
self.inner.hunks.is_empty()
}
@@ -953,7 +957,7 @@ impl BufferDiffInner<language::BufferSnapshot> {
.flat_map(move |hunk| {
[
(
- &hunk.buffer_range.start,
+ hunk.buffer_range.start,
(
hunk.buffer_range.start,
hunk.diff_base_byte_range.start,
@@ -961,7 +965,7 @@ impl BufferDiffInner<language::BufferSnapshot> {
),
),
(
- &hunk.buffer_range.end,
+ hunk.buffer_range.end,
(hunk.buffer_range.end, hunk.diff_base_byte_range.end, hunk),
),
]
@@ -1653,7 +1657,7 @@ impl BufferDiff {
) {
let hunks = self
.snapshot(cx)
- .hunks_intersecting_range(Anchor::MIN..Anchor::MAX, buffer)
+ .hunks_intersecting_range(Anchor::min_max_range_for_buffer(buffer.remote_id()), buffer)
.collect::<Vec<_>>();
let Some(secondary) = self.secondary_diff.clone() else {
return;
@@ -21,7 +21,7 @@ use language::LanguageRegistry;
use livekit::{LocalTrackPublication, ParticipantIdentity, RoomEvent};
use livekit_client::{self as livekit, AudioStream, TrackSid};
use postage::{sink::Sink, stream::Stream, watch};
-use project::Project;
+use project::{CURRENT_PROJECT_FEATURES, Project};
use settings::Settings as _;
use std::sync::atomic::AtomicU64;
use std::{future::Future, mem, rc::Rc, sync::Arc, time::Duration, time::Instant};
@@ -1237,6 +1237,10 @@ impl Room {
worktrees: project.read(cx).worktree_metadata_protos(cx),
is_ssh_project: project.read(cx).is_via_remote_server(),
windows_paths: Some(project.read(cx).path_style(cx) == PathStyle::Windows),
+ features: CURRENT_PROJECT_FEATURES
+ .iter()
+ .map(|s| s.to_string())
+ .collect(),
});
cx.spawn(async move |this, cx| {
@@ -21,6 +21,7 @@ pub enum CliRequest {
reuse: bool,
env: Option<HashMap<String, String>>,
user_data_dir: Option<String>,
+ dev_container: bool,
},
}
@@ -118,6 +118,12 @@ struct Args {
/// Will attempt to give the correct command to run
#[arg(long)]
system_specs: bool,
+ /// Open the project in a dev container.
+ ///
+ /// Automatically triggers "Reopen in Dev Container" if a `.devcontainer/`
+ /// configuration is found in the project directory.
+ #[arg(long)]
+ dev_container: bool,
/// Pairs of file paths to diff. Can be specified multiple times.
/// When directories are provided, recurses into them and shows all changed files in a single multi-diff view.
#[arg(long, action = clap::ArgAction::Append, num_args = 2, value_names = ["OLD_PATH", "NEW_PATH"])]
@@ -670,6 +676,7 @@ fn main() -> Result<()> {
reuse: args.reuse,
env,
user_data_dir: user_data_dir_for_thread,
+ dev_container: args.dev_container,
})?;
while let Ok(response) = rx.recv() {
@@ -22,6 +22,7 @@ base64.workspace = true
chrono = { workspace = true, features = ["serde"] }
clock.workspace = true
cloud_api_client.workspace = true
+cloud_api_types.workspace = true
cloud_llm_client.workspace = true
collections.workspace = true
credentials_provider.workspace = true
@@ -35,6 +36,7 @@ gpui_tokio.workspace = true
http_client.workspace = true
http_client_tls.workspace = true
httparse = "1.10"
+language_model.workspace = true
log.workspace = true
parking_lot.workspace = true
paths.workspace = true
@@ -60,6 +62,7 @@ tokio.workspace = true
url.workspace = true
util.workspace = true
worktree.workspace = true
+zed_credentials_provider.workspace = true
[dev-dependencies]
clock = { workspace = true, features = ["test-support"] }
@@ -1,6 +1,7 @@
#[cfg(any(test, feature = "test-support"))]
pub mod test;
+mod llm_token;
mod proxy;
pub mod telemetry;
pub mod user;
@@ -13,8 +14,9 @@ use async_tungstenite::tungstenite::{
http::{HeaderValue, Request, StatusCode},
};
use clock::SystemClock;
-use cloud_api_client::CloudApiClient;
use cloud_api_client::websocket_protocol::MessageToClient;
+use cloud_api_client::{ClientApiError, CloudApiClient};
+use cloud_api_types::OrganizationId;
use credentials_provider::CredentialsProvider;
use feature_flags::FeatureFlagAppExt as _;
use futures::{
@@ -24,6 +26,7 @@ use futures::{
};
use gpui::{App, AsyncApp, Entity, Global, Task, WeakEntity, actions};
use http_client::{HttpClient, HttpClientWithUrl, http, read_proxy_from_env};
+use language_model::LlmApiToken;
use parking_lot::{Mutex, RwLock};
use postage::watch;
use proxy::connect_proxy_stream;
@@ -51,6 +54,7 @@ use tokio::net::TcpStream;
use url::Url;
use util::{ConnectionResult, ResultExt};
+pub use llm_token::*;
pub use rpc::*;
pub use telemetry_events::Event;
pub use user::*;
@@ -339,7 +343,7 @@ pub struct ClientCredentialsProvider {
impl ClientCredentialsProvider {
pub fn new(cx: &App) -> Self {
Self {
- provider: <dyn CredentialsProvider>::global(cx),
+ provider: zed_credentials_provider::global(cx),
}
}
@@ -568,6 +572,10 @@ impl Client {
self.http.clone()
}
+ pub fn credentials_provider(&self) -> Arc<dyn CredentialsProvider> {
+ self.credentials_provider.provider.clone()
+ }
+
pub fn cloud_client(&self) -> Arc<CloudApiClient> {
self.cloud_client.clone()
}
@@ -1513,6 +1521,66 @@ impl Client {
})
}
+ pub async fn acquire_llm_token(
+ &self,
+ llm_token: &LlmApiToken,
+ organization_id: Option<OrganizationId>,
+ ) -> Result<String> {
+ let system_id = self.telemetry().system_id().map(|x| x.to_string());
+ let cloud_client = self.cloud_client();
+ match llm_token
+ .acquire(&cloud_client, system_id, organization_id)
+ .await
+ {
+ Ok(token) => Ok(token),
+ Err(ClientApiError::Unauthorized) => {
+ self.request_sign_out();
+ Err(ClientApiError::Unauthorized).context("Failed to create LLM token")
+ }
+ Err(err) => Err(anyhow::Error::from(err)),
+ }
+ }
+
+ pub async fn refresh_llm_token(
+ &self,
+ llm_token: &LlmApiToken,
+ organization_id: Option<OrganizationId>,
+ ) -> Result<String> {
+ let system_id = self.telemetry().system_id().map(|x| x.to_string());
+ let cloud_client = self.cloud_client();
+ match llm_token
+ .refresh(&cloud_client, system_id, organization_id)
+ .await
+ {
+ Ok(token) => Ok(token),
+ Err(ClientApiError::Unauthorized) => {
+ self.request_sign_out();
+ return Err(ClientApiError::Unauthorized).context("Failed to create LLM token");
+ }
+ Err(err) => return Err(anyhow::Error::from(err)),
+ }
+ }
+
+ pub async fn clear_and_refresh_llm_token(
+ &self,
+ llm_token: &LlmApiToken,
+ organization_id: Option<OrganizationId>,
+ ) -> Result<String> {
+ let system_id = self.telemetry().system_id().map(|x| x.to_string());
+ let cloud_client = self.cloud_client();
+ match llm_token
+ .clear_and_refresh(&cloud_client, system_id, organization_id)
+ .await
+ {
+ Ok(token) => Ok(token),
+ Err(ClientApiError::Unauthorized) => {
+ self.request_sign_out();
+ return Err(ClientApiError::Unauthorized).context("Failed to create LLM token");
+ }
+ Err(err) => return Err(anyhow::Error::from(err)),
+ }
+ }
+
pub async fn sign_out(self: &Arc<Self>, cx: &AsyncApp) {
self.state.write().credentials = None;
self.cloud_client.clear_credentials();
@@ -2141,11 +2209,13 @@ mod tests {
project_id: 1,
committer_name: None,
committer_email: None,
+ features: Vec::new(),
});
server.send(proto::JoinProject {
project_id: 2,
committer_name: None,
committer_email: None,
+ features: Vec::new(),
});
done_rx1.recv().await.unwrap();
done_rx2.recv().await.unwrap();
@@ -0,0 +1,116 @@
+use super::{Client, UserStore};
+use cloud_api_types::websocket_protocol::MessageToClient;
+use cloud_llm_client::{EXPIRED_LLM_TOKEN_HEADER_NAME, OUTDATED_LLM_TOKEN_HEADER_NAME};
+use gpui::{
+ App, AppContext as _, Context, Entity, EventEmitter, Global, ReadGlobal as _, Subscription,
+};
+use language_model::LlmApiToken;
+use std::sync::Arc;
+
+pub trait NeedsLlmTokenRefresh {
+ /// Returns whether the LLM token needs to be refreshed.
+ fn needs_llm_token_refresh(&self) -> bool;
+}
+
+impl NeedsLlmTokenRefresh for http_client::Response<http_client::AsyncBody> {
+ fn needs_llm_token_refresh(&self) -> bool {
+ self.headers().get(EXPIRED_LLM_TOKEN_HEADER_NAME).is_some()
+ || self.headers().get(OUTDATED_LLM_TOKEN_HEADER_NAME).is_some()
+ }
+}
+
+enum TokenRefreshMode {
+ Refresh,
+ ClearAndRefresh,
+}
+
+pub fn global_llm_token(cx: &App) -> LlmApiToken {
+ RefreshLlmTokenListener::global(cx)
+ .read(cx)
+ .llm_api_token
+ .clone()
+}
+
+struct GlobalRefreshLlmTokenListener(Entity<RefreshLlmTokenListener>);
+
+impl Global for GlobalRefreshLlmTokenListener {}
+
+pub struct LlmTokenRefreshedEvent;
+
+pub struct RefreshLlmTokenListener {
+ client: Arc<Client>,
+ user_store: Entity<UserStore>,
+ llm_api_token: LlmApiToken,
+ _subscription: Subscription,
+}
+
+impl EventEmitter<LlmTokenRefreshedEvent> for RefreshLlmTokenListener {}
+
+impl RefreshLlmTokenListener {
+ pub fn register(client: Arc<Client>, user_store: Entity<UserStore>, cx: &mut App) {
+ let listener = cx.new(|cx| RefreshLlmTokenListener::new(client, user_store, cx));
+ cx.set_global(GlobalRefreshLlmTokenListener(listener));
+ }
+
+ pub fn global(cx: &App) -> Entity<Self> {
+ GlobalRefreshLlmTokenListener::global(cx).0.clone()
+ }
+
+ fn new(client: Arc<Client>, user_store: Entity<UserStore>, cx: &mut Context<Self>) -> Self {
+ client.add_message_to_client_handler({
+ let this = cx.weak_entity();
+ move |message, cx| {
+ if let Some(this) = this.upgrade() {
+ Self::handle_refresh_llm_token(this, message, cx);
+ }
+ }
+ });
+
+ let subscription = cx.subscribe(&user_store, |this, _user_store, event, cx| {
+ if matches!(event, super::user::Event::OrganizationChanged) {
+ this.refresh(TokenRefreshMode::ClearAndRefresh, cx);
+ }
+ });
+
+ Self {
+ client,
+ user_store,
+ llm_api_token: LlmApiToken::default(),
+ _subscription: subscription,
+ }
+ }
+
+ fn refresh(&self, mode: TokenRefreshMode, cx: &mut Context<Self>) {
+ let client = self.client.clone();
+ let llm_api_token = self.llm_api_token.clone();
+ let organization_id = self
+ .user_store
+ .read(cx)
+ .current_organization()
+ .map(|organization| organization.id.clone());
+ cx.spawn(async move |this, cx| {
+ match mode {
+ TokenRefreshMode::Refresh => {
+ client
+ .refresh_llm_token(&llm_api_token, organization_id)
+ .await?;
+ }
+ TokenRefreshMode::ClearAndRefresh => {
+ client
+ .clear_and_refresh_llm_token(&llm_api_token, organization_id)
+ .await?;
+ }
+ }
+ this.update(cx, |_this, cx| cx.emit(LlmTokenRefreshedEvent))
+ })
+ .detach_and_log_err(cx);
+ }
+
+ fn handle_refresh_llm_token(this: Entity<Self>, message: &MessageToClient, cx: &mut App) {
+ match message {
+ MessageToClient::UserUpdated => {
+ this.update(cx, |this, cx| this.refresh(TokenRefreshMode::Refresh, cx));
+ }
+ }
+ }
+}
@@ -22,6 +22,7 @@ log.workspace = true
serde.workspace = true
serde_json.workspace = true
text.workspace = true
+zed_credentials_provider.workspace = true
zeta_prompt.workspace = true
[dev-dependencies]
@@ -48,9 +48,10 @@ pub fn codestral_api_key(cx: &App) -> Option<Arc<str>> {
}
pub fn load_codestral_api_key(cx: &mut App) -> Task<Result<(), AuthenticateError>> {
+ let credentials_provider = zed_credentials_provider::global(cx);
let api_url = codestral_api_url(cx);
codestral_api_key_state(cx).update(cx, |key_state, cx| {
- key_state.load_if_needed(api_url, |s| s, cx)
+ key_state.load_if_needed(api_url, |s| s, credentials_provider, cx)
})
}
@@ -48,7 +48,8 @@ CREATE TABLE "projects" (
"host_connection_id" INTEGER,
"host_connection_server_id" INTEGER REFERENCES servers (id) ON DELETE CASCADE,
"unregistered" BOOLEAN NOT NULL DEFAULT FALSE,
- "windows_paths" BOOLEAN NOT NULL DEFAULT FALSE
+ "windows_paths" BOOLEAN NOT NULL DEFAULT FALSE,
+ "features" TEXT NOT NULL DEFAULT ''
);
CREATE INDEX "index_projects_on_host_connection_server_id" ON "projects" ("host_connection_server_id");
@@ -64,6 +65,7 @@ CREATE TABLE "worktrees" (
"scan_id" INTEGER NOT NULL,
"is_complete" BOOL NOT NULL DEFAULT FALSE,
"completed_scan_id" INTEGER NOT NULL,
+ "root_repo_common_dir" VARCHAR,
PRIMARY KEY (project_id, id)
);
@@ -332,7 +332,8 @@ CREATE TABLE public.projects (
room_id integer,
host_connection_id integer,
host_connection_server_id integer,
- windows_paths boolean DEFAULT false
+ windows_paths boolean DEFAULT false,
+ features text NOT NULL DEFAULT ''
);
CREATE SEQUENCE public.projects_id_seq
@@ -483,7 +484,8 @@ CREATE TABLE public.worktrees (
visible boolean NOT NULL,
scan_id bigint NOT NULL,
is_complete boolean DEFAULT false NOT NULL,
- completed_scan_id bigint
+ completed_scan_id bigint,
+ root_repo_common_dir character varying
);
ALTER TABLE ONLY public.breakpoints ALTER COLUMN id SET DEFAULT nextval('public.breakpoints_id_seq'::regclass);
@@ -559,6 +559,7 @@ pub struct RejoinedWorktree {
pub settings_files: Vec<WorktreeSettingsFile>,
pub scan_id: u64,
pub completed_scan_id: u64,
+ pub root_repo_common_dir: Option<String>,
}
pub struct LeftRoom {
@@ -589,6 +590,7 @@ pub struct Project {
pub repositories: Vec<proto::UpdateRepository>,
pub language_servers: Vec<LanguageServer>,
pub path_style: PathStyle,
+ pub features: Vec<String>,
}
pub struct ProjectCollaborator {
@@ -637,6 +639,7 @@ pub struct Worktree {
pub settings_files: Vec<WorktreeSettingsFile>,
pub scan_id: u64,
pub completed_scan_id: u64,
+ pub root_repo_common_dir: Option<String>,
}
#[derive(Debug)]
@@ -34,6 +34,7 @@ impl Database {
worktrees: &[proto::WorktreeMetadata],
is_ssh_project: bool,
windows_paths: bool,
+ features: &[String],
) -> Result<TransactionGuard<(ProjectId, proto::Room)>> {
self.room_transaction(room_id, |tx| async move {
let participant = room_participant::Entity::find()
@@ -71,6 +72,7 @@ impl Database {
))),
id: ActiveValue::NotSet,
windows_paths: ActiveValue::set(windows_paths),
+ features: ActiveValue::set(serde_json::to_string(features).unwrap()),
}
.insert(&*tx)
.await?;
@@ -85,6 +87,7 @@ impl Database {
visible: ActiveValue::set(worktree.visible),
scan_id: ActiveValue::set(0),
completed_scan_id: ActiveValue::set(0),
+ root_repo_common_dir: ActiveValue::set(None),
}
}))
.exec(&*tx)
@@ -201,6 +204,7 @@ impl Database {
visible: ActiveValue::set(worktree.visible),
scan_id: ActiveValue::set(0),
completed_scan_id: ActiveValue::set(0),
+ root_repo_common_dir: ActiveValue::set(None),
}))
.on_conflict(
OnConflict::columns([worktree::Column::ProjectId, worktree::Column::Id])
@@ -264,6 +268,7 @@ impl Database {
ActiveValue::default()
},
abs_path: ActiveValue::set(update.abs_path.clone()),
+ root_repo_common_dir: ActiveValue::set(update.root_repo_common_dir.clone()),
..Default::default()
})
.exec(&*tx)
@@ -759,6 +764,7 @@ impl Database {
settings_files: Default::default(),
scan_id: db_worktree.scan_id as u64,
completed_scan_id: db_worktree.completed_scan_id as u64,
+ root_repo_common_dir: db_worktree.root_repo_common_dir,
legacy_repository_entries: Default::default(),
},
)
@@ -948,6 +954,7 @@ impl Database {
} else {
PathStyle::Posix
};
+ let features: Vec<String> = serde_json::from_str(&project.features).unwrap_or_default();
let project = Project {
id: project.id,
@@ -977,6 +984,7 @@ impl Database {
})
.collect(),
path_style,
+ features,
};
Ok((project, replica_id as ReplicaId))
}
@@ -629,6 +629,7 @@ impl Database {
settings_files: Default::default(),
scan_id: db_worktree.scan_id as u64,
completed_scan_id: db_worktree.completed_scan_id as u64,
+ root_repo_common_dir: db_worktree.root_repo_common_dir,
};
let rejoined_worktree = rejoined_project
@@ -13,6 +13,7 @@ pub struct Model {
pub host_connection_id: Option<i32>,
pub host_connection_server_id: Option<ServerId>,
pub windows_paths: bool,
+ pub features: String,
}
impl Model {
@@ -15,6 +15,7 @@ pub struct Model {
pub scan_id: i64,
/// The last scan that fully completed.
pub completed_scan_id: i64,
+ pub root_repo_common_dir: Option<String>,
}
#[derive(Copy, Clone, Debug, EnumIter, DeriveRelation)]
@@ -435,6 +435,7 @@ impl Server {
.add_request_handler(forward_mutating_project_request::<proto::GitCreateRemote>)
.add_request_handler(forward_mutating_project_request::<proto::GitRemoveRemote>)
.add_request_handler(forward_read_only_project_request::<proto::GitGetWorktrees>)
+ .add_request_handler(forward_read_only_project_request::<proto::GitGetHeadSha>)
.add_request_handler(forward_mutating_project_request::<proto::GitCreateWorktree>)
.add_request_handler(disallow_guest_request::<proto::GitRemoveWorktree>)
.add_request_handler(disallow_guest_request::<proto::GitRenameWorktree>)
@@ -1485,6 +1486,7 @@ fn notify_rejoined_projects(
worktree_id: worktree.id,
abs_path: worktree.abs_path.clone(),
root_name: worktree.root_name,
+ root_repo_common_dir: worktree.root_repo_common_dir,
updated_entries: worktree.updated_entries,
removed_entries: worktree.removed_entries,
scan_id: worktree.scan_id,
@@ -1775,6 +1777,7 @@ async fn share_project(
&request.worktrees,
request.is_ssh_project,
request.windows_paths.unwrap_or(false),
+ &request.features,
)
.await?;
response.send(proto::ShareProjectResponse {
@@ -1840,6 +1843,28 @@ async fn join_project(
tracing::info!(%project_id, "join project");
let db = session.db().await;
+ let project_model = db.get_project(project_id).await?;
+ let host_features: Vec<String> =
+ serde_json::from_str(&project_model.features).unwrap_or_default();
+ let guest_features: HashSet<_> = request.features.iter().collect();
+ let host_features_set: HashSet<_> = host_features.iter().collect();
+ if guest_features != host_features_set {
+ let host_connection_id = project_model.host_connection()?;
+ let mut pool = session.connection_pool().await;
+ let host_version = pool
+ .connection(host_connection_id)
+ .map(|c| c.zed_version.to_string());
+ let guest_version = pool
+ .connection(session.connection_id)
+ .map(|c| c.zed_version.to_string());
+ drop(pool);
+ Err(anyhow!(
+ "The host (v{}) and guest (v{}) are using incompatible versions of Zed. The peer with the older version must update to collaborate.",
+ host_version.as_deref().unwrap_or("unknown"),
+ guest_version.as_deref().unwrap_or("unknown"),
+ ))?;
+ }
+
let (project, replica_id) = &mut *db
.join_project(
project_id,
@@ -1850,6 +1875,7 @@ async fn join_project(
)
.await?;
drop(db);
+
tracing::info!(%project_id, "join remote project");
let collaborators = project
.collaborators
@@ -1909,6 +1935,7 @@ async fn join_project(
language_server_capabilities,
role: project.role.into(),
windows_paths: project.path_style == PathStyle::Windows,
+ features: project.features.clone(),
})?;
for (worktree_id, worktree) in mem::take(&mut project.worktrees) {
@@ -1918,6 +1945,7 @@ async fn join_project(
worktree_id,
abs_path: worktree.abs_path.clone(),
root_name: worktree.root_name,
+ root_repo_common_dir: worktree.root_repo_common_dir,
updated_entries: worktree.entries,
removed_entries: Default::default(),
scan_id: worktree.scan_id,
@@ -313,7 +313,7 @@ fn assert_remote_selections(
let snapshot = editor.snapshot(window, cx);
let hub = editor.collaboration_hub().unwrap();
let collaborators = hub.collaborators(cx);
- let range = Anchor::min()..Anchor::max();
+ let range = Anchor::Min..Anchor::Max;
let remote_selections = snapshot
.remote_selections_in_range(&range, hub, cx)
.map(|s| {
@@ -350,20 +350,41 @@ async fn test_project_count(db: &Arc<Database>) {
.unwrap();
assert_eq!(db.project_count_excluding_admins().await.unwrap(), 0);
- db.share_project(room_id, ConnectionId { owner_id, id: 1 }, &[], false, false)
- .await
- .unwrap();
+ db.share_project(
+ room_id,
+ ConnectionId { owner_id, id: 1 },
+ &[],
+ false,
+ false,
+ &[],
+ )
+ .await
+ .unwrap();
assert_eq!(db.project_count_excluding_admins().await.unwrap(), 1);
- db.share_project(room_id, ConnectionId { owner_id, id: 1 }, &[], false, false)
- .await
- .unwrap();
+ db.share_project(
+ room_id,
+ ConnectionId { owner_id, id: 1 },
+ &[],
+ false,
+ false,
+ &[],
+ )
+ .await
+ .unwrap();
assert_eq!(db.project_count_excluding_admins().await.unwrap(), 2);
// Projects shared by admins aren't counted.
- db.share_project(room_id, ConnectionId { owner_id, id: 0 }, &[], false, false)
- .await
- .unwrap();
+ db.share_project(
+ room_id,
+ ConnectionId { owner_id, id: 0 },
+ &[],
+ false,
+ false,
+ &[],
+ )
+ .await
+ .unwrap();
assert_eq!(db.project_count_excluding_admins().await.unwrap(), 2);
db.leave_room(ConnectionId { owner_id, id: 1 })
@@ -2184,6 +2184,7 @@ async fn test_following_after_replacement(cx_a: &mut TestAppContext, cx_b: &mut
);
mb
});
+ let multibuffer_snapshot = multibuffer.update(cx_a, |mb, cx| mb.snapshot(cx));
let snapshot = buffer.update(cx_a, |buffer, _| buffer.snapshot());
let editor: Entity<Editor> = cx_a.new_window_entity(|window, cx| {
Editor::for_multibuffer(
@@ -2205,7 +2206,13 @@ async fn test_following_after_replacement(cx_a: &mut TestAppContext, cx_b: &mut
editor
.selections
.disjoint_anchor_ranges()
- .map(|range| range.start.text_anchor.to_point(&snapshot))
+ .map(|range| {
+ multibuffer_snapshot
+ .anchor_to_buffer_anchor(range.start)
+ .unwrap()
+ .0
+ .to_point(&snapshot)
+ })
.collect::<Vec<_>>()
});
multibuffer.update(cx_a, |multibuffer, cx| {
@@ -2232,7 +2239,13 @@ async fn test_following_after_replacement(cx_a: &mut TestAppContext, cx_b: &mut
editor
.selections
.disjoint_anchor_ranges()
- .map(|range| range.start.text_anchor.to_point(&snapshot))
+ .map(|range| {
+ multibuffer_snapshot
+ .anchor_to_buffer_anchor(range.start)
+ .unwrap()
+ .0
+ .to_point(&snapshot)
+ })
.collect::<Vec<_>>()
});
assert_eq!(positions, new_positions);
@@ -1,4 +1,4 @@
-use std::path::{Path, PathBuf};
+use std::path::{self, Path, PathBuf};
use call::ActiveCall;
use client::RECEIVE_TIMEOUT;
@@ -17,6 +17,61 @@ use workspace::{MultiWorkspace, Workspace};
use crate::TestServer;
+#[gpui::test]
+async fn test_root_repo_common_dir_sync(
+ executor: BackgroundExecutor,
+ cx_a: &mut TestAppContext,
+ cx_b: &mut TestAppContext,
+) {
+ let mut server = TestServer::start(executor.clone()).await;
+ let client_a = server.create_client(cx_a, "user_a").await;
+ let client_b = server.create_client(cx_b, "user_b").await;
+ server
+ .create_room(&mut [(&client_a, cx_a), (&client_b, cx_b)])
+ .await;
+ let active_call_a = cx_a.read(ActiveCall::global);
+
+ // Set up a project whose root IS a git repository.
+ client_a
+ .fs()
+ .insert_tree(
+ path!("/project"),
+ json!({ ".git": {}, "file.txt": "content" }),
+ )
+ .await;
+
+ let (project_a, _) = client_a.build_local_project(path!("/project"), cx_a).await;
+ executor.run_until_parked();
+
+ // Host should see root_repo_common_dir pointing to .git at the root.
+ let host_common_dir = project_a.read_with(cx_a, |project, cx| {
+ let worktree = project.worktrees(cx).next().unwrap();
+ worktree.read(cx).snapshot().root_repo_common_dir().cloned()
+ });
+ assert_eq!(
+ host_common_dir.as_deref(),
+ Some(path::Path::new(path!("/project/.git"))),
+ );
+
+ // Share the project and have client B join.
+ let project_id = active_call_a
+ .update(cx_a, |call, cx| call.share_project(project_a.clone(), cx))
+ .await
+ .unwrap();
+ let project_b = client_b.join_remote_project(project_id, cx_b).await;
+ executor.run_until_parked();
+
+ // Guest should see the same root_repo_common_dir as the host.
+ let guest_common_dir = project_b.read_with(cx_b, |project, cx| {
+ let worktree = project.worktrees(cx).next().unwrap();
+ worktree.read(cx).snapshot().root_repo_common_dir().cloned()
+ });
+ assert_eq!(
+ guest_common_dir, host_common_dir,
+ "guest should see the same root_repo_common_dir as host",
+ );
+}
+
fn collect_diff_stats<C: gpui::AppContext>(
panel: &gpui::Entity<GitPanel>,
cx: &C,
@@ -369,6 +424,58 @@ async fn test_remote_git_worktrees(
);
}
+#[gpui::test]
+async fn test_remote_git_head_sha(
+ executor: BackgroundExecutor,
+ cx_a: &mut TestAppContext,
+ cx_b: &mut TestAppContext,
+) {
+ let mut server = TestServer::start(executor.clone()).await;
+ let client_a = server.create_client(cx_a, "user_a").await;
+ let client_b = server.create_client(cx_b, "user_b").await;
+ server
+ .create_room(&mut [(&client_a, cx_a), (&client_b, cx_b)])
+ .await;
+ let active_call_a = cx_a.read(ActiveCall::global);
+
+ client_a
+ .fs()
+ .insert_tree(
+ path!("/project"),
+ json!({ ".git": {}, "file.txt": "content" }),
+ )
+ .await;
+
+ let (project_a, _) = client_a.build_local_project(path!("/project"), cx_a).await;
+ let local_head_sha = cx_a.update(|cx| {
+ project_a
+ .read(cx)
+ .active_repository(cx)
+ .unwrap()
+ .update(cx, |repository, _| repository.head_sha())
+ });
+ let local_head_sha = local_head_sha.await.unwrap().unwrap();
+
+ let project_id = active_call_a
+ .update(cx_a, |call, cx| call.share_project(project_a.clone(), cx))
+ .await
+ .unwrap();
+ let project_b = client_b.join_remote_project(project_id, cx_b).await;
+
+ executor.run_until_parked();
+
+ let remote_head_sha = cx_b.update(|cx| {
+ project_b
+ .read(cx)
+ .active_repository(cx)
+ .unwrap()
+ .update(cx, |repository, _| repository.head_sha())
+ });
+ let remote_head_sha = remote_head_sha.await.unwrap();
+
+ assert_eq!(remote_head_sha.unwrap(), local_head_sha);
+}
+
#[gpui::test]
async fn test_linked_worktrees_sync(
executor: BackgroundExecutor,
@@ -394,29 +501,29 @@ async fn test_linked_worktrees_sync(
)
.await;
- client_a
- .fs()
- .with_git_state(Path::new(path!("/project/.git")), true, |state| {
- state.worktrees.push(GitWorktree {
- path: PathBuf::from(path!("/project")),
- ref_name: Some("refs/heads/main".into()),
- sha: "aaa111".into(),
- is_main: false,
- });
- state.worktrees.push(GitWorktree {
- path: PathBuf::from(path!("/project/feature-branch")),
- ref_name: Some("refs/heads/feature-branch".into()),
- sha: "bbb222".into(),
- is_main: false,
- });
- state.worktrees.push(GitWorktree {
- path: PathBuf::from(path!("/project/bugfix-branch")),
- ref_name: Some("refs/heads/bugfix-branch".into()),
- sha: "ccc333".into(),
- is_main: false,
- });
- })
- .unwrap();
+ let fs = client_a.fs();
+ fs.add_linked_worktree_for_repo(
+ Path::new(path!("/project/.git")),
+ true,
+ GitWorktree {
+ path: PathBuf::from(path!("/worktrees/feature-branch")),
+ ref_name: Some("refs/heads/feature-branch".into()),
+ sha: "bbb222".into(),
+ is_main: false,
+ },
+ )
+ .await;
+ fs.add_linked_worktree_for_repo(
+ Path::new(path!("/project/.git")),
+ true,
+ GitWorktree {
+ path: PathBuf::from(path!("/worktrees/bugfix-branch")),
+ ref_name: Some("refs/heads/bugfix-branch".into()),
+ sha: "ccc333".into(),
+ is_main: false,
+ },
+ )
+ .await;
let (project_a, _) = client_a.build_local_project(path!("/project"), cx_a).await;
@@ -437,22 +544,22 @@ async fn test_linked_worktrees_sync(
);
assert_eq!(
host_linked[0].path,
- PathBuf::from(path!("/project/feature-branch"))
+ PathBuf::from(path!("/worktrees/bugfix-branch"))
);
assert_eq!(
host_linked[0].ref_name,
- Some("refs/heads/feature-branch".into())
+ Some("refs/heads/bugfix-branch".into())
);
- assert_eq!(host_linked[0].sha.as_ref(), "bbb222");
+ assert_eq!(host_linked[0].sha.as_ref(), "ccc333");
assert_eq!(
host_linked[1].path,
- PathBuf::from(path!("/project/bugfix-branch"))
+ PathBuf::from(path!("/worktrees/feature-branch"))
);
assert_eq!(
host_linked[1].ref_name,
- Some("refs/heads/bugfix-branch".into())
+ Some("refs/heads/feature-branch".into())
);
- assert_eq!(host_linked[1].sha.as_ref(), "ccc333");
+ assert_eq!(host_linked[1].sha.as_ref(), "bbb222");
// Share the project and have client B join.
let project_id = active_call_a
@@ -478,15 +585,17 @@ async fn test_linked_worktrees_sync(
// Now mutate: add a third linked worktree on the host side.
client_a
.fs()
- .with_git_state(Path::new(path!("/project/.git")), true, |state| {
- state.worktrees.push(GitWorktree {
- path: PathBuf::from(path!("/project/hotfix-branch")),
+ .add_linked_worktree_for_repo(
+ Path::new(path!("/project/.git")),
+ true,
+ GitWorktree {
+ path: PathBuf::from(path!("/worktrees/hotfix-branch")),
ref_name: Some("refs/heads/hotfix-branch".into()),
sha: "ddd444".into(),
is_main: false,
- });
- })
- .unwrap();
+ },
+ )
+ .await;
// Wait for the host to re-scan and propagate the update.
executor.run_until_parked();
@@ -504,7 +613,7 @@ async fn test_linked_worktrees_sync(
);
assert_eq!(
host_linked_updated[2].path,
- PathBuf::from(path!("/project/hotfix-branch"))
+ PathBuf::from(path!("/worktrees/hotfix-branch"))
);
// Verify the guest also received the update.
@@ -521,12 +630,12 @@ async fn test_linked_worktrees_sync(
// Now mutate: remove one linked worktree from the host side.
client_a
.fs()
- .with_git_state(Path::new(path!("/project/.git")), true, |state| {
- state
- .worktrees
- .retain(|wt| wt.ref_name != Some("refs/heads/bugfix-branch".into()));
- })
- .unwrap();
+ .remove_worktree_for_repo(
+ Path::new(path!("/project/.git")),
+ true,
+ "refs/heads/bugfix-branch",
+ )
+ .await;
executor.run_until_parked();
@@ -469,7 +469,7 @@ async fn test_ssh_collaboration_git_worktrees(
.unwrap();
assert_eq!(worktrees.len(), 1);
- let worktree_directory = PathBuf::from("/project");
+ let worktree_directory = PathBuf::from("/worktrees");
cx_b.update(|cx| {
repo_b.update(cx, |repo, _| {
repo.create_worktree(
@@ -536,8 +536,8 @@ async fn test_ssh_collaboration_git_worktrees(
cx_a.update(|cx| {
repo_a.update(cx, |repository, _| {
repository.rename_worktree(
- PathBuf::from("/project/feature-branch"),
- PathBuf::from("/project/renamed-branch"),
+ PathBuf::from("/worktrees/feature-branch"),
+ PathBuf::from("/worktrees/renamed-branch"),
)
})
})
@@ -559,7 +559,7 @@ async fn test_ssh_collaboration_git_worktrees(
);
assert_eq!(
host_worktrees[1].path,
- PathBuf::from("/project/renamed-branch")
+ PathBuf::from("/worktrees/renamed-branch")
);
let server_worktrees = {
@@ -588,13 +588,13 @@ async fn test_ssh_collaboration_git_worktrees(
);
assert_eq!(
server_worktrees[1].path,
- PathBuf::from("/project/renamed-branch")
+ PathBuf::from("/worktrees/renamed-branch")
);
// Host (client A) removes the renamed worktree via SSH
cx_a.update(|cx| {
repo_a.update(cx, |repository, _| {
- repository.remove_worktree(PathBuf::from("/project/renamed-branch"), false)
+ repository.remove_worktree(PathBuf::from("/worktrees/renamed-branch"), false)
})
})
.await
@@ -13,12 +13,13 @@ use db::kvp::KeyValueStore;
use editor::{Editor, EditorElement, EditorStyle};
use fuzzy::{StringMatch, StringMatchCandidate, match_strings};
use gpui::{
- AnyElement, App, AsyncWindowContext, Bounds, ClickEvent, ClipboardItem, Context, DismissEvent,
- Div, Entity, EventEmitter, FocusHandle, Focusable, FontStyle, InteractiveElement, IntoElement,
- KeyContext, ListOffset, ListState, MouseDownEvent, ParentElement, Pixels, Point, PromptLevel,
- Render, SharedString, Styled, Subscription, Task, TextStyle, WeakEntity, Window, actions,
- anchored, canvas, deferred, div, fill, list, point, prelude::*, px,
+ AnyElement, App, AsyncWindowContext, Bounds, ClickEvent, ClipboardItem, DismissEvent, Div,
+ Empty, Entity, EventEmitter, FocusHandle, Focusable, FontStyle, KeyContext, ListOffset,
+ ListState, MouseDownEvent, Pixels, Point, PromptLevel, SharedString, Subscription, Task,
+ TextStyle, WeakEntity, Window, actions, anchored, canvas, deferred, div, fill, list, point,
+ prelude::*, px,
};
+
use menu::{Cancel, Confirm, SecondaryConfirm, SelectNext, SelectPrevious};
use project::{Fs, Project};
use rpc::{
@@ -43,6 +44,9 @@ use workspace::{
notifications::{DetachAndPromptErr, NotifyResultExt},
};
+const FILTER_OCCUPIED_CHANNELS_KEY: &str = "filter_occupied_channels";
+const FAVORITE_CHANNELS_KEY: &str = "favorite_channels";
+
actions!(
collab_panel,
[
@@ -243,7 +247,9 @@ pub struct CollabPanel {
fs: Arc<dyn Fs>,
focus_handle: FocusHandle,
channel_clipboard: Option<ChannelMoveClipboard>,
- pending_serialization: Task<Option<()>>,
+ pending_panel_serialization: Task<Option<()>>,
+ pending_favorites_serialization: Task<Option<()>>,
+ pending_filter_serialization: Task<Option<()>>,
context_menu: Option<(Entity<ContextMenu>, Point<Pixels>, Subscription)>,
list_state: ListState,
filter_editor: Entity<Editor>,
@@ -259,7 +265,7 @@ pub struct CollabPanel {
subscriptions: Vec<Subscription>,
collapsed_sections: Vec<Section>,
collapsed_channels: Vec<ChannelId>,
- filter_active_channels: bool,
+ filter_occupied_channels: bool,
workspace: WeakEntity<Workspace>,
}
@@ -377,7 +383,9 @@ impl CollabPanel {
focus_handle: cx.focus_handle(),
channel_clipboard: None,
fs: workspace.app_state().fs.clone(),
- pending_serialization: Task::ready(None),
+ pending_panel_serialization: Task::ready(None),
+ pending_favorites_serialization: Task::ready(None),
+ pending_filter_serialization: Task::ready(None),
context_menu: None,
list_state: ListState::new(0, gpui::ListAlignment::Top, px(1000.)),
channel_name_editor,
@@ -392,7 +400,7 @@ impl CollabPanel {
match_candidates: Vec::default(),
collapsed_sections: vec![Section::Offline],
collapsed_channels: Vec::default(),
- filter_active_channels: false,
+ filter_occupied_channels: false,
workspace: workspace.weak_handle(),
client: workspace.app_state().client.clone(),
};
@@ -473,8 +481,22 @@ impl CollabPanel {
});
}
+ let filter_occupied_channels = KeyValueStore::global(cx)
+ .read_kvp(FILTER_OCCUPIED_CHANNELS_KEY)
+ .ok()
+ .flatten()
+ .is_some();
+
+ panel.update(cx, |panel, cx| {
+ panel.filter_occupied_channels = filter_occupied_channels;
+
+ if filter_occupied_channels {
+ panel.update_entries(false, cx);
+ }
+ });
+
let favorites: Vec<ChannelId> = KeyValueStore::global(cx)
- .read_kvp("favorite_channels")
+ .read_kvp(FAVORITE_CHANNELS_KEY)
.ok()
.flatten()
.and_then(|json| serde_json::from_str::<Vec<u64>>(&json).ok())
@@ -519,7 +541,7 @@ impl CollabPanel {
};
let kvp = KeyValueStore::global(cx);
- self.pending_serialization = cx.background_spawn(
+ self.pending_panel_serialization = cx.background_spawn(
async move {
kvp.write_kvp(
serialization_key,
@@ -779,14 +801,14 @@ impl CollabPanel {
channels.retain(|chan| channel_ids_of_matches_or_parents.contains(&chan.id));
- if self.filter_active_channels {
- let active_channel_ids_or_ancestors: HashSet<_> = channel_store
+ if self.filter_occupied_channels {
+ let occupied_channel_ids_or_ancestors: HashSet<_> = channel_store
.ordered_channels()
.map(|(_, channel)| channel)
.filter(|channel| !channel_store.channel_participants(channel.id).is_empty())
.flat_map(|channel| channel.parent_path.iter().copied().chain(Some(channel.id)))
.collect();
- channels.retain(|channel| active_channel_ids_or_ancestors.contains(&channel.id));
+ channels.retain(|channel| occupied_channel_ids_or_ancestors.contains(&channel.id));
}
if let Some(state) = &self.channel_editing_state
@@ -795,7 +817,7 @@ impl CollabPanel {
self.entries.push(ListEntry::ChannelEditor { depth: 0 });
}
- let should_respect_collapse = query.is_empty() && !self.filter_active_channels;
+ let should_respect_collapse = query.is_empty() && !self.filter_occupied_channels;
let mut collapse_depth = None;
for (idx, channel) in channels.into_iter().enumerate() {
@@ -1091,27 +1113,30 @@ impl CollabPanel {
room.read(cx).local_participant().role == proto::ChannelRole::Admin
});
+ let end_slot = if is_pending {
+ Label::new("Calling").color(Color::Muted).into_any_element()
+ } else if is_current_user {
+ IconButton::new("leave-call", IconName::Exit)
+ .icon_size(IconSize::Small)
+ .tooltip(Tooltip::text("Leave Call"))
+ .on_click(move |_, window, cx| Self::leave_call(window, cx))
+ .into_any_element()
+ } else if role == proto::ChannelRole::Guest {
+ Label::new("Guest").color(Color::Muted).into_any_element()
+ } else if role == proto::ChannelRole::Talker {
+ Label::new("Mic only")
+ .color(Color::Muted)
+ .into_any_element()
+ } else {
+ Empty.into_any_element()
+ };
+
ListItem::new(user.github_login.clone())
.start_slot(Avatar::new(user.avatar_uri.clone()))
.child(render_participant_name_and_handle(user))
.toggle_state(is_selected)
- .end_slot(if is_pending {
- Label::new("Calling").color(Color::Muted).into_any_element()
- } else if is_current_user {
- IconButton::new("leave-call", IconName::Exit)
- .style(ButtonStyle::Subtle)
- .on_click(move |_, window, cx| Self::leave_call(window, cx))
- .tooltip(Tooltip::text("Leave Call"))
- .into_any_element()
- } else if role == proto::ChannelRole::Guest {
- Label::new("Guest").color(Color::Muted).into_any_element()
- } else if role == proto::ChannelRole::Talker {
- Label::new("Mic only")
- .color(Color::Muted)
- .into_any_element()
- } else {
- div().into_any_element()
- })
+ .end_slot(end_slot)
+ .tooltip(Tooltip::text("Click to Follow"))
.when_some(peer_id, |el, peer_id| {
if role == proto::ChannelRole::Guest {
return el;
@@ -1156,6 +1181,7 @@ impl CollabPanel {
.into();
ListItem::new(project_id as usize)
+ .height(px(24.))
.toggle_state(is_selected)
.on_click(cx.listener(move |this, _, window, cx| {
this.workspace
@@ -1166,16 +1192,20 @@ impl CollabPanel {
"Failed to join project",
window,
cx,
- |_, _, _| None,
+ |error, _, _| Some(format!("{error:#}")),
);
})
.ok();
}))
.start_slot(
h_flex()
- .gap_1()
+ .gap_1p5()
.child(render_tree_branch(is_last, false, window, cx))
- .child(IconButton::new(0, IconName::Folder)),
+ .child(
+ Icon::new(IconName::Folder)
+ .size(IconSize::Small)
+ .color(Color::Muted),
+ ),
)
.child(Label::new(project_name.clone()))
.tooltip(Tooltip::text(format!("Open {}", project_name)))
@@ -1192,12 +1222,17 @@ impl CollabPanel {
let id = peer_id.map_or(usize::MAX, |id| id.as_u64() as usize);
ListItem::new(("screen", id))
+ .height(px(24.))
.toggle_state(is_selected)
.start_slot(
h_flex()
- .gap_1()
+ .gap_1p5()
.child(render_tree_branch(is_last, false, window, cx))
- .child(IconButton::new(0, IconName::Screen)),
+ .child(
+ Icon::new(IconName::Screen)
+ .size(IconSize::Small)
+ .color(Color::Muted),
+ ),
)
.child(Label::new("Screen"))
.when_some(peer_id, |this, _| {
@@ -1208,7 +1243,7 @@ impl CollabPanel {
})
.ok();
}))
- .tooltip(Tooltip::text("Open shared screen"))
+ .tooltip(Tooltip::text("Open Shared Screen"))
})
}
@@ -1232,7 +1267,9 @@ impl CollabPanel {
) -> impl IntoElement {
let channel_store = self.channel_store.read(cx);
let has_channel_buffer_changed = channel_store.has_channel_buffer_changed(channel_id);
+
ListItem::new("channel-notes")
+ .height(px(24.))
.toggle_state(is_selected)
.on_click(cx.listener(move |this, _, window, cx| {
this.open_channel_notes(channel_id, window, cx);
@@ -1240,17 +1277,25 @@ impl CollabPanel {
.start_slot(
h_flex()
.relative()
- .gap_1()
+ .gap_1p5()
.child(render_tree_branch(false, true, window, cx))
- .child(IconButton::new(0, IconName::File))
- .children(has_channel_buffer_changed.then(|| {
- div()
- .w_1p5()
- .absolute()
- .right(px(2.))
- .top(px(2.))
- .child(Indicator::dot().color(Color::Info))
- })),
+ .child(
+ h_flex()
+ .child(
+ Icon::new(IconName::Reader)
+ .size(IconSize::Small)
+ .color(Color::Muted),
+ )
+ .when(has_channel_buffer_changed, |this| {
+ this.child(
+ div()
+ .absolute()
+ .top_neg_0p5()
+ .right_0()
+ .child(Indicator::dot().color(Color::Info)),
+ )
+ }),
+ ),
)
.child(Label::new("notes"))
.tooltip(Tooltip::text("Open Channel Notes"))
@@ -1729,7 +1774,7 @@ impl CollabPanel {
"Failed to join project",
window,
cx,
- |_, _, _| None,
+ |error, _, _| Some(format!("{error:#}")),
);
}
}
@@ -1946,6 +1991,26 @@ impl CollabPanel {
self.channel_store.read(cx).is_channel_favorited(channel_id)
}
+ fn persist_filter_occupied_channels(&mut self, cx: &mut Context<Self>) {
+ let is_enabled = self.filter_occupied_channels;
+ let kvp_store = KeyValueStore::global(cx);
+ self.pending_filter_serialization = cx.background_spawn(
+ async move {
+ if is_enabled {
+ kvp_store
+ .write_kvp(FILTER_OCCUPIED_CHANNELS_KEY.to_string(), "1".to_string())
+ .await?;
+ } else {
+ kvp_store
+ .delete_kvp(FILTER_OCCUPIED_CHANNELS_KEY.to_string())
+ .await?;
+ }
+ anyhow::Ok(())
+ }
+ .log_err(),
+ );
+ }
+
fn persist_favorites(&mut self, cx: &mut Context<Self>) {
let favorite_ids: Vec<u64> = self
.channel_store
@@ -1955,11 +2020,11 @@ impl CollabPanel {
.map(|id| id.0)
.collect();
let kvp_store = KeyValueStore::global(cx);
- self.pending_serialization = cx.background_spawn(
+ self.pending_favorites_serialization = cx.background_spawn(
async move {
let json = serde_json::to_string(&favorite_ids)?;
kvp_store
- .write_kvp("favorite_channels".to_string(), json)
+ .write_kvp(FAVORITE_CHANNELS_KEY.to_string(), json)
.await?;
anyhow::Ok(())
}
@@ -2819,14 +2884,15 @@ impl CollabPanel {
Some(
h_flex()
.child(
- IconButton::new("filter-active-channels", IconName::ListFilter)
+ IconButton::new("filter-occupied-channels", IconName::ListFilter)
.icon_size(IconSize::Small)
- .toggle_state(self.filter_active_channels)
+ .toggle_state(self.filter_occupied_channels)
.on_click(cx.listener(|this, _, _window, cx| {
- this.filter_active_channels = !this.filter_active_channels;
+ this.filter_occupied_channels = !this.filter_occupied_channels;
this.update_entries(true, cx);
+ this.persist_filter_occupied_channels(cx);
}))
- .tooltip(Tooltip::text(if self.filter_active_channels {
+ .tooltip(Tooltip::text(if self.filter_occupied_channels {
"Show All Channels"
} else {
"Show Occupied Channels"
@@ -3144,10 +3210,14 @@ impl CollabPanel {
(IconName::Star, Color::Default, "Add to Favorites")
};
+ let height = px(24.);
+
h_flex()
.id(ix)
.group("")
+ .h(height)
.w_full()
+ .overflow_hidden()
.when(!channel.is_root_channel(), |el| {
el.on_drag(channel.clone(), move |channel, _, _, cx| {
cx.new(|_| DraggedChannelView {
@@ -3175,6 +3245,7 @@ impl CollabPanel {
)
.child(
ListItem::new(ix)
+ .height(height)
// Add one level of depth for the disclosure arrow.
.indent_level(depth + 1)
.indent_step_size(px(20.))
@@ -3256,12 +3327,13 @@ impl CollabPanel {
.child(
h_flex()
.visible_on_hover("")
+ .h_full()
.absolute()
.right_0()
.px_1()
.gap_px()
- .bg(cx.theme().colors().background)
.rounded_l_md()
+ .bg(cx.theme().colors().background)
.child({
let focus_handle = self.focus_handle.clone();
IconButton::new("channel_favorite", favorite_icon)
@@ -3335,9 +3407,8 @@ fn render_tree_branch(
) -> impl IntoElement {
let rem_size = window.rem_size();
let line_height = window.text_style().line_height_in_pixels(rem_size);
- let width = rem_size * 1.5;
let thickness = px(1.);
- let color = cx.theme().colors().text;
+ let color = cx.theme().colors().icon_disabled;
canvas(
|_, _, _| {},
@@ -3367,8 +3438,8 @@ fn render_tree_branch(
));
},
)
- .w(width)
- .h(line_height)
+ .w(rem_size)
+ .h(line_height - px(2.))
}
fn render_participant_name_and_handle(user: &User) -> impl IntoElement {
@@ -2733,10 +2733,7 @@ mod tests {
assert!(refreshed);
assert_eq!(provider.access_token().as_deref(), Some("new-access"));
- let notified_session = rx
- .try_next()
- .unwrap()
- .expect("channel should have a session");
+ let notified_session = rx.try_recv().expect("channel should have a session");
assert_eq!(notified_session.tokens.access_token, "new-access");
assert_eq!(
notified_session.tokens.refresh_token.as_deref(),
@@ -2768,10 +2765,7 @@ mod tests {
let refreshed = provider.try_refresh().await.unwrap();
assert!(refreshed);
- let notified_session = rx
- .try_next()
- .unwrap()
- .expect("channel should have a session");
+ let notified_session = rx.try_recv().expect("channel should have a session");
assert_eq!(notified_session.tokens.access_token, "new-access");
assert_eq!(
notified_session.tokens.refresh_token.as_deref(),
@@ -1045,7 +1045,7 @@ mod tests {
});
executor.advance_clock(COPILOT_DEBOUNCE_TIMEOUT);
- assert!(copilot_requests.try_next().is_err());
+ assert!(copilot_requests.try_recv().is_err());
_ = editor.update(cx, |editor, window, cx| {
editor.change_selections(SelectionEffects::no_scroll(), window, cx, |s| {
@@ -1055,7 +1055,7 @@ mod tests {
});
executor.advance_clock(COPILOT_DEBOUNCE_TIMEOUT);
- assert!(copilot_requests.try_next().is_ok());
+ assert!(copilot_requests.try_recv().is_ok());
}
fn handle_copilot_completion_request(
@@ -13,9 +13,5 @@ path = "src/credentials_provider.rs"
[dependencies]
anyhow.workspace = true
-futures.workspace = true
gpui.workspace = true
-paths.workspace = true
-release_channel.workspace = true
serde.workspace = true
-serde_json.workspace = true
@@ -1,26 +1,8 @@
-use std::collections::HashMap;
use std::future::Future;
-use std::path::PathBuf;
use std::pin::Pin;
-use std::sync::{Arc, LazyLock};
use anyhow::Result;
-use futures::FutureExt as _;
-use gpui::{App, AsyncApp};
-use release_channel::ReleaseChannel;
-
-/// An environment variable whose presence indicates that the system keychain
-/// should be used in development.
-///
-/// By default, running Zed in development uses the development credentials
-/// provider. Setting this environment variable allows you to interact with the
-/// system keychain (for instance, if you need to test something).
-///
-/// Only works in development. Setting this environment variable in other
-/// release channels is a no-op.
-static ZED_DEVELOPMENT_USE_KEYCHAIN: LazyLock<bool> = LazyLock::new(|| {
- std::env::var("ZED_DEVELOPMENT_USE_KEYCHAIN").is_ok_and(|value| !value.is_empty())
-});
+use gpui::AsyncApp;
/// A provider for credentials.
///
@@ -50,150 +32,3 @@ pub trait CredentialsProvider: Send + Sync {
cx: &'a AsyncApp,
) -> Pin<Box<dyn Future<Output = Result<()>> + 'a>>;
}
-
-impl dyn CredentialsProvider {
- /// Returns the global [`CredentialsProvider`].
- pub fn global(cx: &App) -> Arc<Self> {
- // The `CredentialsProvider` trait has `Send + Sync` bounds on it, so it
- // seems like this is a false positive from Clippy.
- #[allow(clippy::arc_with_non_send_sync)]
- Self::new(cx)
- }
-
- fn new(cx: &App) -> Arc<Self> {
- let use_development_provider = match ReleaseChannel::try_global(cx) {
- Some(ReleaseChannel::Dev) => {
- // In development we default to using the development
- // credentials provider to avoid getting spammed by relentless
- // keychain access prompts.
- //
- // However, if the `ZED_DEVELOPMENT_USE_KEYCHAIN` environment
- // variable is set, we will use the actual keychain.
- !*ZED_DEVELOPMENT_USE_KEYCHAIN
- }
- Some(ReleaseChannel::Nightly | ReleaseChannel::Preview | ReleaseChannel::Stable)
- | None => false,
- };
-
- if use_development_provider {
- Arc::new(DevelopmentCredentialsProvider::new())
- } else {
- Arc::new(KeychainCredentialsProvider)
- }
- }
-}
-
-/// A credentials provider that stores credentials in the system keychain.
-struct KeychainCredentialsProvider;
-
-impl CredentialsProvider for KeychainCredentialsProvider {
- fn read_credentials<'a>(
- &'a self,
- url: &'a str,
- cx: &'a AsyncApp,
- ) -> Pin<Box<dyn Future<Output = Result<Option<(String, Vec<u8>)>>> + 'a>> {
- async move { cx.update(|cx| cx.read_credentials(url)).await }.boxed_local()
- }
-
- fn write_credentials<'a>(
- &'a self,
- url: &'a str,
- username: &'a str,
- password: &'a [u8],
- cx: &'a AsyncApp,
- ) -> Pin<Box<dyn Future<Output = Result<()>> + 'a>> {
- async move {
- cx.update(move |cx| cx.write_credentials(url, username, password))
- .await
- }
- .boxed_local()
- }
-
- fn delete_credentials<'a>(
- &'a self,
- url: &'a str,
- cx: &'a AsyncApp,
- ) -> Pin<Box<dyn Future<Output = Result<()>> + 'a>> {
- async move { cx.update(move |cx| cx.delete_credentials(url)).await }.boxed_local()
- }
-}
-
-/// A credentials provider that stores credentials in a local file.
-///
-/// This MUST only be used in development, as this is not a secure way of storing
-/// credentials on user machines.
-///
-/// Its existence is purely to work around the annoyance of having to constantly
-/// re-allow access to the system keychain when developing Zed.
-struct DevelopmentCredentialsProvider {
- path: PathBuf,
-}
-
-impl DevelopmentCredentialsProvider {
- fn new() -> Self {
- let path = paths::config_dir().join("development_credentials");
-
- Self { path }
- }
-
- fn load_credentials(&self) -> Result<HashMap<String, (String, Vec<u8>)>> {
- let json = std::fs::read(&self.path)?;
- let credentials: HashMap<String, (String, Vec<u8>)> = serde_json::from_slice(&json)?;
-
- Ok(credentials)
- }
-
- fn save_credentials(&self, credentials: &HashMap<String, (String, Vec<u8>)>) -> Result<()> {
- let json = serde_json::to_string(credentials)?;
- std::fs::write(&self.path, json)?;
-
- Ok(())
- }
-}
-
-impl CredentialsProvider for DevelopmentCredentialsProvider {
- fn read_credentials<'a>(
- &'a self,
- url: &'a str,
- _cx: &'a AsyncApp,
- ) -> Pin<Box<dyn Future<Output = Result<Option<(String, Vec<u8>)>>> + 'a>> {
- async move {
- Ok(self
- .load_credentials()
- .unwrap_or_default()
- .get(url)
- .cloned())
- }
- .boxed_local()
- }
-
- fn write_credentials<'a>(
- &'a self,
- url: &'a str,
- username: &'a str,
- password: &'a [u8],
- _cx: &'a AsyncApp,
- ) -> Pin<Box<dyn Future<Output = Result<()>> + 'a>> {
- async move {
- let mut credentials = self.load_credentials().unwrap_or_default();
- credentials.insert(url.to_string(), (username.to_string(), password.to_vec()));
-
- self.save_credentials(&credentials)
- }
- .boxed_local()
- }
-
- fn delete_credentials<'a>(
- &'a self,
- url: &'a str,
- _cx: &'a AsyncApp,
- ) -> Pin<Box<dyn Future<Output = Result<()>> + 'a>> {
- async move {
- let mut credentials = self.load_credentials()?;
- credentials.remove(url);
-
- self.save_credentials(&credentials)
- }
- .boxed_local()
- }
-}
@@ -161,9 +161,7 @@ impl CsvPreviewView {
editor,
|this: &mut CsvPreviewView, _editor, event: &EditorEvent, cx| {
match event {
- EditorEvent::Edited { .. }
- | EditorEvent::DirtyChanged
- | EditorEvent::ExcerptsEdited { .. } => {
+ EditorEvent::Edited { .. } | EditorEvent::DirtyChanged => {
this.parse_csv_from_active_editor(true, cx);
}
_ => {}
@@ -1086,6 +1086,7 @@ impl SearchableItem for DapLogView {
// DAP log is read-only.
replacement: false,
selection: false,
+ select_all: true,
}
}
fn active_match_index(
@@ -299,7 +299,7 @@ pub fn init(cx: &mut App) {
return;
}
maybe!({
- let (buffer, position, _) = editor
+ let (buffer, position) = editor
.update(cx, |editor, cx| {
let cursor_point: language::Point = editor
.selections
@@ -7,8 +7,8 @@ use anyhow::Result;
use collections::HashMap;
use dap::{CompletionItem, CompletionItemType, OutputEvent};
use editor::{
- Bias, CompletionProvider, Editor, EditorElement, EditorMode, EditorStyle, ExcerptId,
- HighlightKey, MultiBufferOffset, SizingBehavior,
+ Bias, CompletionProvider, Editor, EditorElement, EditorMode, EditorStyle, HighlightKey,
+ MultiBufferOffset, SizingBehavior,
};
use fuzzy::StringMatchCandidate;
use gpui::{
@@ -528,7 +528,6 @@ struct ConsoleQueryBarCompletionProvider(WeakEntity<Console>);
impl CompletionProvider for ConsoleQueryBarCompletionProvider {
fn completions(
&self,
- _excerpt_id: ExcerptId,
buffer: &Entity<Buffer>,
buffer_position: language::Anchor,
_trigger: editor::CompletionContext,
@@ -60,7 +60,8 @@ pub(crate) enum ShutdownAction {
#[derive(Clone, Debug, Deserialize, Serialize, Eq, PartialEq)]
#[serde(rename_all = "camelCase")]
pub(crate) struct MountDefinition {
- pub(crate) source: String,
+ #[serde(default)]
+ pub(crate) source: Option<String>,
pub(crate) target: String,
#[serde(rename = "type")]
pub(crate) mount_type: Option<String>,
@@ -68,19 +69,23 @@ pub(crate) struct MountDefinition {
impl Display for MountDefinition {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
- write!(
- f,
- "type={},source={},target={},consistency=cached",
- self.mount_type.clone().unwrap_or_else(|| {
- if self.source.starts_with('/') {
- "bind".to_string()
- } else {
- "volume".to_string()
+ let mount_type = self.mount_type.clone().unwrap_or_else(|| {
+ if let Some(source) = &self.source {
+ if source.starts_with('/')
+ || source.starts_with("\\\\")
+ || source.get(1..3) == Some(":\\")
+ || source.get(1..3) == Some(":/")
+ {
+ return "bind".to_string();
}
- }),
- self.source,
- self.target
- )
+ }
+ "volume".to_string()
+ });
+ write!(f, "type={}", mount_type)?;
+ if let Some(source) = &self.source {
+ write!(f, ",source={}", source)?;
+ }
+ write!(f, ",target={},consistency=cached", self.target)
}
}
@@ -253,13 +258,6 @@ impl DevContainer {
}
return DevContainerBuildType::None;
}
-
- pub(crate) fn has_features(&self) -> bool {
- self.features
- .as_ref()
- .map(|features| !features.is_empty())
- .unwrap_or(false)
- }
}
// Custom deserializer that parses the entire customizations object as a
@@ -450,8 +448,6 @@ where
}
}
- let source = source
- .ok_or_else(|| D::Error::custom(format!("mount string missing 'source': {}", s)))?;
let target = target
.ok_or_else(|| D::Error::custom(format!("mount string missing 'target': {}", s)))?;
@@ -505,9 +501,6 @@ where
}
}
- let source = source.ok_or_else(|| {
- D::Error::custom(format!("mount string missing 'source': {}", s))
- })?;
let target = target.ok_or_else(|| {
D::Error::custom(format!("mount string missing 'target': {}", s))
})?;
@@ -876,7 +869,7 @@ mod test {
])),
container_user: Some("myUser".to_string()),
mounts: Some(vec![MountDefinition {
- source: "/localfolder/app".to_string(),
+ source: Some("/localfolder/app".to_string()),
target: "/workspaces/app".to_string(),
mount_type: Some("volume".to_string()),
}]),
@@ -885,7 +878,7 @@ mod test {
override_command: Some(true),
workspace_folder: Some("/workspaces".to_string()),
workspace_mount: Some(MountDefinition {
- source: "/app".to_string(),
+ source: Some("/app".to_string()),
target: "/workspaces/app".to_string(),
mount_type: Some("bind".to_string())
}),
@@ -1319,12 +1312,12 @@ mod test {
container_user: Some("myUser".to_string()),
mounts: Some(vec![
MountDefinition {
- source: "/localfolder/app".to_string(),
+ source: Some("/localfolder/app".to_string()),
target: "/workspaces/app".to_string(),
mount_type: Some("volume".to_string()),
},
MountDefinition {
- source: "dev-containers-cli-bashhistory".to_string(),
+ source: Some("dev-containers-cli-bashhistory".to_string()),
target: "/home/node/commandhistory".to_string(),
mount_type: None,
}
@@ -1334,7 +1327,7 @@ mod test {
override_command: Some(true),
workspace_folder: Some("/workspaces".to_string()),
workspace_mount: Some(MountDefinition {
- source: "/folder".to_string(),
+ source: Some("/folder".to_string()),
target: "/workspace".to_string(),
mount_type: Some("bind".to_string())
}),
@@ -1355,4 +1348,65 @@ mod test {
assert_eq!(devcontainer.build_type(), DevContainerBuildType::Dockerfile);
}
+
+ #[test]
+ fn mount_definition_should_use_bind_type_for_unix_absolute_paths() {
+ let mount = MountDefinition {
+ source: Some("/home/user/project".to_string()),
+ target: "/workspaces/project".to_string(),
+ mount_type: None,
+ };
+
+ let rendered = mount.to_string();
+
+ assert!(
+ rendered.starts_with("type=bind,"),
+ "Expected mount type 'bind' for Unix absolute path, but got: {rendered}"
+ );
+ }
+
+ #[test]
+ fn mount_definition_should_use_bind_type_for_windows_unc_paths() {
+ let mount = MountDefinition {
+ source: Some("\\\\server\\share\\project".to_string()),
+ target: "/workspaces/project".to_string(),
+ mount_type: None,
+ };
+
+ let rendered = mount.to_string();
+
+ assert!(
+ rendered.starts_with("type=bind,"),
+ "Expected mount type 'bind' for Windows UNC path, but got: {rendered}"
+ );
+ }
+
+ #[test]
+ fn mount_definition_should_use_bind_type_for_windows_absolute_paths() {
+ let mount = MountDefinition {
+ source: Some("C:\\Users\\mrg\\cli".to_string()),
+ target: "/workspaces/cli".to_string(),
+ mount_type: None,
+ };
+
+ let rendered = mount.to_string();
+
+ assert!(
+ rendered.starts_with("type=bind,"),
+ "Expected mount type 'bind' for Windows absolute path, but got: {rendered}"
+ );
+ }
+
+ #[test]
+ fn mount_definition_should_omit_source_when_none() {
+ let mount = MountDefinition {
+ source: None,
+ target: "/tmp".to_string(),
+ mount_type: Some("tmpfs".to_string()),
+ };
+
+ let rendered = mount.to_string();
+
+ assert_eq!(rendered, "type=tmpfs,target=/tmp,consistency=cached");
+ }
}
@@ -20,7 +20,8 @@ use crate::{
},
docker::{
Docker, DockerClient, DockerComposeConfig, DockerComposeService, DockerComposeServiceBuild,
- DockerComposeVolume, DockerInspect, DockerPs, get_remote_dir_from_config,
+ DockerComposeServicePort, DockerComposeVolume, DockerInspect, DockerPs,
+ get_remote_dir_from_config,
},
features::{DevContainerFeatureJson, FeatureManifest, parse_oci_feature_ref},
get_oci_token,
@@ -316,13 +317,6 @@ impl DevContainerManifest {
let root_image_tag = self.get_base_image_from_config().await?;
let root_image = self.docker_client.inspect(&root_image_tag).await?;
- if dev_container.build_type() == DevContainerBuildType::Image
- && !dev_container.has_features()
- {
- log::debug!("No resources to download. Proceeding with just the image");
- return Ok(());
- }
-
let temp_base = std::env::temp_dir().join("devcontainer-zed");
let timestamp = std::time::SystemTime::now()
.duration_since(std::time::UNIX_EPOCH)
@@ -700,10 +694,29 @@ RUN sed -i -E 's/((^|\s)PATH=)([^\$]*)$/\1\${{PATH:-\3}}/g' /etc/profile || true
}
let dev_container = self.dev_container();
match dev_container.build_type() {
- DevContainerBuildType::Image | DevContainerBuildType::Dockerfile => {
+ DevContainerBuildType::Image => {
+ let built_docker_image = self.build_docker_image().await?;
+ let Some(base_image) = dev_container.image.as_ref() else {
+ log::error!("Dev container is using and image which can't be referenced");
+ return Err(DevContainerError::DevContainerParseFailed);
+ };
+ let built_docker_image = self
+ .update_remote_user_uid(built_docker_image, base_image)
+ .await?;
+
+ let resources = self.build_merged_resources(built_docker_image)?;
+ Ok(DevContainerBuildResources::Docker(resources))
+ }
+ DevContainerBuildType::Dockerfile => {
let built_docker_image = self.build_docker_image().await?;
+ let Some(features_build_info) = &self.features_build_info else {
+ log::error!(
+ "Can't attempt to build update UID dockerfile before initial docker build"
+ );
+ return Err(DevContainerError::DevContainerParseFailed);
+ };
let built_docker_image = self
- .update_remote_user_uid(built_docker_image, None)
+ .update_remote_user_uid(built_docker_image, &features_build_info.image_tag)
.await?;
let resources = self.build_merged_resources(built_docker_image)?;
@@ -815,7 +828,7 @@ RUN sed -i -E 's/((^|\s)PATH=)([^\$]*)$/\1\${{PATH:-\3}}/g' /etc/profile || true
let (main_service_name, main_service) =
find_primary_service(&docker_compose_resources, self)?;
- let built_service_image = if main_service
+ let (built_service_image, built_service_image_tag) = if main_service
.build
.as_ref()
.map(|b| b.dockerfile.as_ref())
@@ -870,7 +883,13 @@ RUN sed -i -E 's/((^|\s)PATH=)([^\$]*)$/\1\${{PATH:-\3}}/g' /etc/profile || true
labels: None,
build: Some(DockerComposeServiceBuild {
context: Some(
- features_build_info.empty_context_dir.display().to_string(),
+ main_service
+ .build
+ .as_ref()
+ .and_then(|b| b.context.clone())
+ .unwrap_or_else(|| {
+ features_build_info.empty_context_dir.display().to_string()
+ }),
),
dockerfile: Some(dockerfile_path.display().to_string()),
args: Some(build_args),
@@ -904,16 +923,19 @@ RUN sed -i -E 's/((^|\s)PATH=)([^\$]*)$/\1\${{PATH:-\3}}/g' /etc/profile || true
self.docker_client
.docker_compose_build(&docker_compose_resources.files, &self.project_name())
.await?;
- self.docker_client
- .inspect(&features_build_info.image_tag)
- .await?
+ (
+ self.docker_client
+ .inspect(&features_build_info.image_tag)
+ .await?,
+ &features_build_info.image_tag,
+ )
} else if let Some(image) = &main_service.image {
if dev_container
.features
.as_ref()
.is_none_or(|features| features.is_empty())
{
- self.docker_client.inspect(image).await?
+ (self.docker_client.inspect(image).await?, image)
} else {
if !supports_buildkit {
self.build_feature_content_image().await?;
@@ -993,9 +1015,12 @@ RUN sed -i -E 's/((^|\s)PATH=)([^\$]*)$/\1\${{PATH:-\3}}/g' /etc/profile || true
.docker_compose_build(&docker_compose_resources.files, &self.project_name())
.await?;
- self.docker_client
- .inspect(&features_build_info.image_tag)
- .await?
+ (
+ self.docker_client
+ .inspect(&features_build_info.image_tag)
+ .await?,
+ &features_build_info.image_tag,
+ )
}
} else {
log::error!("Docker compose must have either image or dockerfile defined");
@@ -1003,7 +1028,7 @@ RUN sed -i -E 's/((^|\s)PATH=)([^\$]*)$/\1\${{PATH:-\3}}/g' /etc/profile || true
};
let built_service_image = self
- .update_remote_user_uid(built_service_image, Some(&features_build_info.image_tag))
+ .update_remote_user_uid(built_service_image, built_service_image_tag)
.await?;
let resources = self.build_merged_resources(built_service_image)?;
@@ -1052,7 +1077,7 @@ RUN sed -i -E 's/((^|\s)PATH=)([^\$]*)$/\1\${{PATH:-\3}}/g' /etc/profile || true
network_mode_service: Option<&str>,
resources: DockerBuildResources,
) -> Result<DockerComposeConfig, DevContainerError> {
- let mut runtime_labels = vec![];
+ let mut runtime_labels = HashMap::new();
if let Some(metadata) = &resources.image.config.labels.metadata {
let serialized_metadata = serde_json_lenient::to_string(metadata).map_err(|e| {
@@ -1060,14 +1085,11 @@ RUN sed -i -E 's/((^|\s)PATH=)([^\$]*)$/\1\${{PATH:-\3}}/g' /etc/profile || true
DevContainerError::ContainerNotValid(resources.image.id.clone())
})?;
- runtime_labels.push(format!(
- "{}={}",
- "devcontainer.metadata", serialized_metadata
- ));
+ runtime_labels.insert("devcontainer.metadata".to_string(), serialized_metadata);
}
for (k, v) in self.identifying_labels() {
- runtime_labels.push(format!("{}={}", k, v));
+ runtime_labels.insert(k.to_string(), v.to_string());
}
let config_volumes: HashMap<String, DockerComposeVolume> = resources
@@ -1076,11 +1098,12 @@ RUN sed -i -E 's/((^|\s)PATH=)([^\$]*)$/\1\${{PATH:-\3}}/g' /etc/profile || true
.filter_map(|mount| {
if let Some(mount_type) = &mount.mount_type
&& mount_type.to_lowercase() == "volume"
+ && let Some(source) = &mount.source
{
Some((
- mount.source.clone(),
+ source.clone(),
DockerComposeVolume {
- name: mount.source.clone(),
+ name: source.clone(),
},
))
} else {
@@ -1140,18 +1163,30 @@ RUN sed -i -E 's/((^|\s)PATH=)([^\$]*)$/\1\${{PATH:-\3}}/g' /etc/profile || true
// If the main service uses a different service's network bridge, append to that service's ports instead
if let Some(network_service_name) = network_mode_service {
if let Some(service) = service_declarations.get_mut(network_service_name) {
- service.ports.push(format!("{port}:{port}"));
+ service.ports.push(DockerComposeServicePort {
+ target: port.clone(),
+ published: port.clone(),
+ ..Default::default()
+ });
} else {
service_declarations.insert(
network_service_name.to_string(),
DockerComposeService {
- ports: vec![format!("{port}:{port}")],
+ ports: vec![DockerComposeServicePort {
+ target: port.clone(),
+ published: port.clone(),
+ ..Default::default()
+ }],
..Default::default()
},
);
}
} else {
- main_service.ports.push(format!("{port}:{port}"));
+ main_service.ports.push(DockerComposeServicePort {
+ target: port.clone(),
+ published: port.clone(),
+ ..Default::default()
+ });
}
}
let other_service_ports: Vec<(&str, &str)> = forward_ports
@@ -1174,12 +1209,20 @@ RUN sed -i -E 's/((^|\s)PATH=)([^\$]*)$/\1\${{PATH:-\3}}/g' /etc/profile || true
.collect();
for (service_name, port) in other_service_ports {
if let Some(service) = service_declarations.get_mut(service_name) {
- service.ports.push(format!("{port}:{port}"));
+ service.ports.push(DockerComposeServicePort {
+ target: port.to_string(),
+ published: port.to_string(),
+ ..Default::default()
+ });
} else {
service_declarations.insert(
service_name.to_string(),
DockerComposeService {
- ports: vec![format!("{port}:{port}")],
+ ports: vec![DockerComposeServicePort {
+ target: port.to_string(),
+ published: port.to_string(),
+ ..Default::default()
+ }],
..Default::default()
},
);
@@ -1189,18 +1232,30 @@ RUN sed -i -E 's/((^|\s)PATH=)([^\$]*)$/\1\${{PATH:-\3}}/g' /etc/profile || true
if let Some(port) = &self.dev_container().app_port {
if let Some(network_service_name) = network_mode_service {
if let Some(service) = service_declarations.get_mut(network_service_name) {
- service.ports.push(format!("{port}:{port}"));
+ service.ports.push(DockerComposeServicePort {
+ target: port.clone(),
+ published: port.clone(),
+ ..Default::default()
+ });
} else {
service_declarations.insert(
network_service_name.to_string(),
DockerComposeService {
- ports: vec![format!("{port}:{port}")],
+ ports: vec![DockerComposeServicePort {
+ target: port.clone(),
+ published: port.clone(),
+ ..Default::default()
+ }],
..Default::default()
},
);
}
} else {
- main_service.ports.push(format!("{port}:{port}"));
+ main_service.ports.push(DockerComposeServicePort {
+ target: port.clone(),
+ published: port.clone(),
+ ..Default::default()
+ });
}
}
@@ -1282,7 +1337,7 @@ RUN sed -i -E 's/((^|\s)PATH=)([^\$]*)$/\1\${{PATH:-\3}}/g' /etc/profile || true
async fn update_remote_user_uid(
&self,
image: DockerInspect,
- _override_tag: Option<&str>,
+ _base_image: &str,
) -> Result<DockerInspect, DevContainerError> {
Ok(image)
}
@@ -1290,7 +1345,7 @@ RUN sed -i -E 's/((^|\s)PATH=)([^\$]*)$/\1\${{PATH:-\3}}/g' /etc/profile || true
async fn update_remote_user_uid(
&self,
image: DockerInspect,
- override_tag: Option<&str>,
+ base_image: &str,
) -> Result<DockerInspect, DevContainerError> {
let dev_container = self.dev_container();
@@ -1364,18 +1419,13 @@ RUN sed -i -E 's/((^|\s)PATH=)([^\$]*)$/\1\${{PATH:-\3}}/g' /etc/profile || true
DevContainerError::FilesystemError
})?;
- let updated_image_tag = override_tag
- .map(|t| t.to_string())
- .unwrap_or_else(|| format!("{}-uid", features_build_info.image_tag));
+ let updated_image_tag = format!("{}-uid", features_build_info.image_tag);
let mut command = Command::new(self.docker_client.docker_cli());
command.args(["build"]);
command.args(["-f", &dockerfile_path.display().to_string()]);
command.args(["-t", &updated_image_tag]);
- command.args([
- "--build-arg",
- &format!("BASE_IMAGE={}", features_build_info.image_tag),
- ]);
+ command.args(["--build-arg", &format!("BASE_IMAGE={}", base_image)]);
command.args(["--build-arg", &format!("REMOTE_USER={}", remote_user)]);
command.args(["--build-arg", &format!("NEW_UID={}", host_uid)]);
command.args(["--build-arg", &format!("NEW_GID={}", host_gid)]);
@@ -1701,7 +1751,7 @@ RUN sed -i -E 's/((^|\s)PATH=)([^\$]*)$/\1\${PATH:-\3}/g' /etc/profile || true
};
Ok(MountDefinition {
- source: self.local_workspace_folder(),
+ source: Some(self.local_workspace_folder()),
target: format!("/workspaces/{}", project_directory_name.display()),
mount_type: None,
})
@@ -2292,23 +2342,21 @@ fn get_remote_user_from_config(
{
return Ok(user.clone());
}
- let Some(metadata) = &docker_config.config.labels.metadata else {
- log::error!("Could not locate metadata");
- return Err(DevContainerError::ContainerNotValid(
- docker_config.id.clone(),
- ));
- };
- for metadatum in metadata {
- if let Some(remote_user) = metadatum.get("remoteUser") {
- if let Some(remote_user_str) = remote_user.as_str() {
- return Ok(remote_user_str.to_string());
+ if let Some(metadata) = &docker_config.config.labels.metadata {
+ for metadatum in metadata {
+ if let Some(remote_user) = metadatum.get("remoteUser") {
+ if let Some(remote_user_str) = remote_user.as_str() {
+ return Ok(remote_user_str.to_string());
+ }
}
}
}
- log::error!("Could not locate the remote user");
- Err(DevContainerError::ContainerNotValid(
- docker_config.id.clone(),
- ))
+ if let Some(image_user) = &docker_config.config.image_user {
+ if !image_user.is_empty() {
+ return Ok(image_user.to_string());
+ }
+ }
+ Ok("root".to_string())
}
// This should come from spec - see the docs
@@ -2332,7 +2380,7 @@ fn get_container_user_from_config(
return Ok(image_user.to_string());
}
- Err(DevContainerError::DevContainerParseFailed)
+ Ok("root".to_string())
}
#[cfg(test)]
@@ -2356,6 +2404,8 @@ mod test {
use serde_json_lenient::Value;
use util::{command::Command, paths::SanitizedPath};
+ #[cfg(not(target_os = "windows"))]
+ use crate::docker::DockerComposeServicePort;
use crate::{
DevContainerConfig, DevContainerContext,
command_json::CommandRunner,
@@ -3502,6 +3552,27 @@ ENV DOCKER_BUILDKIT=1
"#
);
+ let build_override = files
+ .iter()
+ .find(|f| {
+ f.file_name()
+ .is_some_and(|s| s.display().to_string() == "docker_compose_build.json")
+ })
+ .expect("to be found");
+ let build_override = test_dependencies.fs.load(build_override).await.unwrap();
+ let build_config: DockerComposeConfig =
+ serde_json_lenient::from_str(&build_override).unwrap();
+ let build_context = build_config
+ .services
+ .get("app")
+ .and_then(|s| s.build.as_ref())
+ .and_then(|b| b.context.clone())
+ .expect("build override should have a context");
+ assert_eq!(
+ build_context, ".",
+ "build override should preserve the original build context from docker-compose.yml"
+ );
+
let runtime_override = files
.iter()
.find(|f| {
@@ -3526,14 +3597,14 @@ ENV DOCKER_BUILDKIT=1
cap_add: Some(vec!["SYS_PTRACE".to_string()]),
security_opt: Some(vec!["seccomp=unconfined".to_string()]),
privileged: Some(true),
- labels: Some(vec![
- "devcontainer.metadata=[{\"remoteUser\":\"vscode\"}]".to_string(),
- "devcontainer.local_folder=/path/to/local/project".to_string(),
- "devcontainer.config_file=/path/to/local/project/.devcontainer/devcontainer.json".to_string()
- ]),
+ labels: Some(HashMap::from([
+ ("devcontainer.metadata".to_string(), "[{\"remoteUser\":\"vscode\"}]".to_string()),
+ ("devcontainer.local_folder".to_string(), "/path/to/local/project".to_string()),
+ ("devcontainer.config_file".to_string(), "/path/to/local/project/.devcontainer/devcontainer.json".to_string())
+ ])),
volumes: vec![
MountDefinition {
- source: "dind-var-lib-docker-42dad4b4ca7b8ced".to_string(),
+ source: Some("dind-var-lib-docker-42dad4b4ca7b8ced".to_string()),
target: "/var/lib/docker".to_string(),
mount_type: Some("volume".to_string())
}
@@ -3545,10 +3616,26 @@ ENV DOCKER_BUILDKIT=1
"db".to_string(),
DockerComposeService {
ports: vec![
- "8083:8083".to_string(),
- "5432:5432".to_string(),
- "1234:1234".to_string(),
- "8084:8084".to_string()
+ DockerComposeServicePort {
+ target: "8083".to_string(),
+ published: "8083".to_string(),
+ ..Default::default()
+ },
+ DockerComposeServicePort {
+ target: "5432".to_string(),
+ published: "5432".to_string(),
+ ..Default::default()
+ },
+ DockerComposeServicePort {
+ target: "1234".to_string(),
+ published: "1234".to_string(),
+ ..Default::default()
+ },
+ DockerComposeServicePort {
+ target: "8084".to_string(),
+ published: "8084".to_string(),
+ ..Default::default()
+ },
],
..Default::default()
},
@@ -4250,6 +4337,175 @@ chmod +x ./install.sh
}))
}
+ #[cfg(not(target_os = "windows"))]
+ #[gpui::test]
+ async fn test_spawns_devcontainer_with_plain_image(cx: &mut TestAppContext) {
+ cx.executor().allow_parking();
+ env_logger::try_init().ok();
+ let given_devcontainer_contents = r#"
+ {
+ "name": "cli-${devcontainerId}",
+ "image": "test_image:latest",
+ }
+ "#;
+
+ let (test_dependencies, mut devcontainer_manifest) =
+ init_default_devcontainer_manifest(cx, given_devcontainer_contents)
+ .await
+ .unwrap();
+
+ devcontainer_manifest.parse_nonremote_vars().unwrap();
+
+ let _devcontainer_up = devcontainer_manifest.build_and_run().await.unwrap();
+
+ let files = test_dependencies.fs.files();
+ let uid_dockerfile = files
+ .iter()
+ .find(|f| {
+ f.file_name()
+ .is_some_and(|s| s.display().to_string() == "updateUID.Dockerfile")
+ })
+ .expect("to be found");
+ let uid_dockerfile = test_dependencies.fs.load(uid_dockerfile).await.unwrap();
+
+ assert_eq!(
+ &uid_dockerfile,
+ r#"ARG BASE_IMAGE
+FROM $BASE_IMAGE
+
+USER root
+
+ARG REMOTE_USER
+ARG NEW_UID
+ARG NEW_GID
+SHELL ["/bin/sh", "-c"]
+RUN eval $(sed -n "s/${REMOTE_USER}:[^:]*:\([^:]*\):\([^:]*\):[^:]*:\([^:]*\).*/OLD_UID=\1;OLD_GID=\2;HOME_FOLDER=\3/p" /etc/passwd); \
+ eval $(sed -n "s/\([^:]*\):[^:]*:${NEW_UID}:.*/EXISTING_USER=\1/p" /etc/passwd); \
+ eval $(sed -n "s/\([^:]*\):[^:]*:${NEW_GID}:.*/EXISTING_GROUP=\1/p" /etc/group); \
+ if [ -z "$OLD_UID" ]; then \
+ echo "Remote user not found in /etc/passwd ($REMOTE_USER)."; \
+ elif [ "$OLD_UID" = "$NEW_UID" -a "$OLD_GID" = "$NEW_GID" ]; then \
+ echo "UIDs and GIDs are the same ($NEW_UID:$NEW_GID)."; \
+ elif [ "$OLD_UID" != "$NEW_UID" -a -n "$EXISTING_USER" ]; then \
+ echo "User with UID exists ($EXISTING_USER=$NEW_UID)."; \
+ else \
+ if [ "$OLD_GID" != "$NEW_GID" -a -n "$EXISTING_GROUP" ]; then \
+ FREE_GID=65532; \
+ while grep -q ":[^:]*:${FREE_GID}:" /etc/group; do FREE_GID=$((FREE_GID - 1)); done; \
+ echo "Reassigning group $EXISTING_GROUP from GID $NEW_GID to $FREE_GID."; \
+ sed -i -e "s/\(${EXISTING_GROUP}:[^:]*:\)${NEW_GID}:/\1${FREE_GID}:/" /etc/group; \
+ fi; \
+ echo "Updating UID:GID from $OLD_UID:$OLD_GID to $NEW_UID:$NEW_GID."; \
+ sed -i -e "s/\(${REMOTE_USER}:[^:]*:\)[^:]*:[^:]*/\1${NEW_UID}:${NEW_GID}/" /etc/passwd; \
+ if [ "$OLD_GID" != "$NEW_GID" ]; then \
+ sed -i -e "s/\([^:]*:[^:]*:\)${OLD_GID}:/\1${NEW_GID}:/" /etc/group; \
+ fi; \
+ chown -R $NEW_UID:$NEW_GID $HOME_FOLDER; \
+ fi;
+
+ARG IMAGE_USER
+USER $IMAGE_USER
+
+# Ensure that /etc/profile does not clobber the existing path
+RUN sed -i -E 's/((^|\s)PATH=)([^\$]*)$/\1\${PATH:-\3}/g' /etc/profile || true
+"#
+ );
+ }
+
+ #[cfg(not(target_os = "windows"))]
+ #[gpui::test]
+ async fn test_spawns_devcontainer_with_docker_compose_and_plain_image(cx: &mut TestAppContext) {
+ cx.executor().allow_parking();
+ env_logger::try_init().ok();
+ let given_devcontainer_contents = r#"
+ {
+ "name": "cli-${devcontainerId}",
+ "dockerComposeFile": "docker-compose-plain.yml",
+ "service": "app",
+ }
+ "#;
+
+ let (test_dependencies, mut devcontainer_manifest) =
+ init_default_devcontainer_manifest(cx, given_devcontainer_contents)
+ .await
+ .unwrap();
+
+ test_dependencies
+ .fs
+ .atomic_write(
+ PathBuf::from(TEST_PROJECT_PATH).join(".devcontainer/docker-compose-plain.yml"),
+ r#"
+services:
+ app:
+ image: test_image:latest
+ command: sleep infinity
+ volumes:
+ - ..:/workspace:cached
+ "#
+ .trim()
+ .to_string(),
+ )
+ .await
+ .unwrap();
+
+ devcontainer_manifest.parse_nonremote_vars().unwrap();
+
+ let _devcontainer_up = devcontainer_manifest.build_and_run().await.unwrap();
+
+ let files = test_dependencies.fs.files();
+ let uid_dockerfile = files
+ .iter()
+ .find(|f| {
+ f.file_name()
+ .is_some_and(|s| s.display().to_string() == "updateUID.Dockerfile")
+ })
+ .expect("to be found");
+ let uid_dockerfile = test_dependencies.fs.load(uid_dockerfile).await.unwrap();
+
+ assert_eq!(
+ &uid_dockerfile,
+ r#"ARG BASE_IMAGE
+FROM $BASE_IMAGE
+
+USER root
+
+ARG REMOTE_USER
+ARG NEW_UID
+ARG NEW_GID
+SHELL ["/bin/sh", "-c"]
+RUN eval $(sed -n "s/${REMOTE_USER}:[^:]*:\([^:]*\):\([^:]*\):[^:]*:\([^:]*\).*/OLD_UID=\1;OLD_GID=\2;HOME_FOLDER=\3/p" /etc/passwd); \
+ eval $(sed -n "s/\([^:]*\):[^:]*:${NEW_UID}:.*/EXISTING_USER=\1/p" /etc/passwd); \
+ eval $(sed -n "s/\([^:]*\):[^:]*:${NEW_GID}:.*/EXISTING_GROUP=\1/p" /etc/group); \
+ if [ -z "$OLD_UID" ]; then \
+ echo "Remote user not found in /etc/passwd ($REMOTE_USER)."; \
+ elif [ "$OLD_UID" = "$NEW_UID" -a "$OLD_GID" = "$NEW_GID" ]; then \
+ echo "UIDs and GIDs are the same ($NEW_UID:$NEW_GID)."; \
+ elif [ "$OLD_UID" != "$NEW_UID" -a -n "$EXISTING_USER" ]; then \
+ echo "User with UID exists ($EXISTING_USER=$NEW_UID)."; \
+ else \
+ if [ "$OLD_GID" != "$NEW_GID" -a -n "$EXISTING_GROUP" ]; then \
+ FREE_GID=65532; \
+ while grep -q ":[^:]*:${FREE_GID}:" /etc/group; do FREE_GID=$((FREE_GID - 1)); done; \
+ echo "Reassigning group $EXISTING_GROUP from GID $NEW_GID to $FREE_GID."; \
+ sed -i -e "s/\(${EXISTING_GROUP}:[^:]*:\)${NEW_GID}:/\1${FREE_GID}:/" /etc/group; \
+ fi; \
+ echo "Updating UID:GID from $OLD_UID:$OLD_GID to $NEW_UID:$NEW_GID."; \
+ sed -i -e "s/\(${REMOTE_USER}:[^:]*:\)[^:]*:[^:]*/\1${NEW_UID}:${NEW_GID}/" /etc/passwd; \
+ if [ "$OLD_GID" != "$NEW_GID" ]; then \
+ sed -i -e "s/\([^:]*:[^:]*:\)${OLD_GID}:/\1${NEW_GID}:/" /etc/group; \
+ fi; \
+ chown -R $NEW_UID:$NEW_GID $HOME_FOLDER; \
+ fi;
+
+ARG IMAGE_USER
+USER $IMAGE_USER
+
+# Ensure that /etc/profile does not clobber the existing path
+RUN sed -i -E 's/((^|\s)PATH=)([^\$]*)$/\1\${PATH:-\3}/g' /etc/profile || true
+"#
+ );
+ }
+
pub(crate) struct RecordedExecCommand {
pub(crate) _container_id: String,
pub(crate) _remote_folder: String,
@@ -4372,6 +4628,24 @@ chmod +x ./install.sh
state: None,
});
}
+ if id == "test_image:latest" {
+ return Ok(DockerInspect {
+ id: "sha256:610e6cfca95280188b021774f8cf69dd6f49bdb6eebc34c5ee2010f4d51cc104"
+ .to_string(),
+ config: DockerInspectConfig {
+ labels: DockerConfigLabels {
+ metadata: Some(vec![HashMap::from([(
+ "remoteUser".to_string(),
+ Value::String("node".to_string()),
+ )])]),
+ },
+ env: Vec::new(),
+ image_user: Some("root".to_string()),
+ },
+ mounts: None,
+ state: None,
+ });
+ }
Err(DevContainerError::DockerNotAvailable)
}
@@ -4398,7 +4672,7 @@ chmod +x ./install.sh
additional_contexts: None,
}),
volumes: vec![MountDefinition {
- source: "../..".to_string(),
+ source: Some("../..".to_string()),
target: "/workspaces".to_string(),
mount_type: Some("bind".to_string()),
}],
@@ -4411,7 +4685,7 @@ chmod +x ./install.sh
DockerComposeService {
image: Some("postgres:14.1".to_string()),
volumes: vec![MountDefinition {
- source: "postgres-data".to_string(),
+ source: Some("postgres-data".to_string()),
target: "/var/lib/postgresql/data".to_string(),
mount_type: Some("volume".to_string()),
}],
@@ -4426,6 +4700,25 @@ chmod +x ./install.sh
)]),
}));
}
+ if config_files.len() == 1
+ && config_files.get(0)
+ == Some(&PathBuf::from(
+ "/path/to/local/project/.devcontainer/docker-compose-plain.yml",
+ ))
+ {
+ return Ok(Some(DockerComposeConfig {
+ name: None,
+ services: HashMap::from([(
+ "app".to_string(),
+ DockerComposeService {
+ image: Some("test_image:latest".to_string()),
+ command: vec!["sleep".to_string(), "infinity".to_string()],
+ ..Default::default()
+ },
+ )]),
+ ..Default::default()
+ }));
+ }
Err(DevContainerError::DockerNotAvailable)
}
async fn docker_compose_build(
@@ -1,7 +1,7 @@
use std::{collections::HashMap, path::PathBuf};
use async_trait::async_trait;
-use serde::{Deserialize, Deserializer, Serialize};
+use serde::{Deserialize, Deserializer, Serialize, de};
use util::command::Command;
use crate::{
@@ -31,9 +31,10 @@ pub(crate) struct DockerInspect {
pub(crate) state: Option<DockerState>,
}
-#[derive(Debug, Clone, Deserialize, Serialize, Eq, PartialEq)]
+#[derive(Debug, Clone, Deserialize, Serialize, Eq, PartialEq, Default)]
pub(crate) struct DockerConfigLabels {
#[serde(
+ default,
rename = "devcontainer.metadata",
deserialize_with = "deserialize_metadata"
)]
@@ -43,6 +44,7 @@ pub(crate) struct DockerConfigLabels {
#[derive(Debug, Clone, Deserialize, Serialize, Eq, PartialEq)]
#[serde(rename_all = "PascalCase")]
pub(crate) struct DockerInspectConfig {
+ #[serde(default, deserialize_with = "deserialize_nullable_labels")]
pub(crate) labels: DockerConfigLabels,
#[serde(rename = "User")]
pub(crate) image_user: Option<String>,
@@ -54,12 +56,11 @@ impl DockerInspectConfig {
pub(crate) fn env_as_map(&self) -> Result<HashMap<String, String>, DevContainerError> {
let mut map = HashMap::new();
for env_var in &self.env {
- let parts: Vec<&str> = env_var.split("=").collect();
- if parts.len() != 2 {
- log::error!("Unable to parse {env_var} into and environment key-value");
+ let Some((key, value)) = env_var.split_once('=') else {
+ log::error!("Unable to parse {env_var} into an environment key-value");
return Err(DevContainerError::DevContainerParseFailed);
- }
- map.insert(parts[0].to_string(), parts[1].to_string());
+ };
+ map.insert(key.to_string(), value.to_string());
}
Ok(map)
}
@@ -84,6 +85,43 @@ pub(crate) struct DockerComposeServiceBuild {
pub(crate) additional_contexts: Option<HashMap<String, String>>,
}
+#[derive(Debug, Clone, Deserialize, Serialize, Eq, PartialEq, Default)]
+pub(crate) struct DockerComposeServicePort {
+ #[serde(deserialize_with = "deserialize_string_or_int")]
+ pub(crate) target: String,
+ #[serde(deserialize_with = "deserialize_string_or_int")]
+ pub(crate) published: String,
+ #[serde(skip_serializing_if = "Option::is_none")]
+ pub(crate) mode: Option<String>,
+ #[serde(skip_serializing_if = "Option::is_none")]
+ pub(crate) protocol: Option<String>,
+ #[serde(skip_serializing_if = "Option::is_none")]
+ pub(crate) host_ip: Option<String>,
+ #[serde(skip_serializing_if = "Option::is_none")]
+ pub(crate) app_protocol: Option<String>,
+ #[serde(skip_serializing_if = "Option::is_none")]
+ pub(crate) name: Option<String>,
+}
+
+fn deserialize_string_or_int<'de, D>(deserializer: D) -> Result<String, D::Error>
+where
+ D: serde::Deserializer<'de>,
+{
+ use serde::Deserialize;
+
+ #[derive(Deserialize)]
+ #[serde(untagged)]
+ enum StringOrInt {
+ String(String),
+ Int(u32),
+ }
+
+ match StringOrInt::deserialize(deserializer)? {
+ StringOrInt::String(s) => Ok(s),
+ StringOrInt::Int(b) => Ok(b.to_string()),
+ }
+}
+
#[derive(Debug, Clone, Deserialize, Serialize, Eq, PartialEq, Default)]
pub(crate) struct DockerComposeService {
pub(crate) image: Option<String>,
@@ -93,19 +131,30 @@ pub(crate) struct DockerComposeService {
pub(crate) cap_add: Option<Vec<String>>,
#[serde(skip_serializing_if = "Option::is_none")]
pub(crate) security_opt: Option<Vec<String>>,
- #[serde(skip_serializing_if = "Option::is_none")]
- pub(crate) labels: Option<Vec<String>>,
+ #[serde(
+ skip_serializing_if = "Option::is_none",
+ default,
+ deserialize_with = "deserialize_labels"
+ )]
+ pub(crate) labels: Option<HashMap<String, String>>,
#[serde(skip_serializing_if = "Option::is_none")]
pub(crate) build: Option<DockerComposeServiceBuild>,
#[serde(skip_serializing_if = "Option::is_none")]
pub(crate) privileged: Option<bool>,
+ #[serde(default, skip_serializing_if = "Vec::is_empty")]
pub(crate) volumes: Vec<MountDefinition>,
#[serde(skip_serializing_if = "Option::is_none")]
pub(crate) env_file: Option<Vec<String>>,
#[serde(default, skip_serializing_if = "Vec::is_empty")]
- pub(crate) ports: Vec<String>,
+ pub(crate) ports: Vec<DockerComposeServicePort>,
#[serde(skip_serializing_if = "Option::is_none")]
pub(crate) network_mode: Option<String>,
+ #[serde(
+ default,
+ skip_serializing_if = "Vec::is_empty",
+ deserialize_with = "deserialize_nullable_vec"
+ )]
+ pub(crate) command: Vec<String>,
}
#[derive(Debug, Clone, Deserialize, Serialize, Eq, PartialEq, Default)]
@@ -118,6 +167,7 @@ pub(crate) struct DockerComposeConfig {
#[serde(skip_serializing_if = "Option::is_none")]
pub(crate) name: Option<String>,
pub(crate) services: HashMap<String, DockerComposeService>,
+ #[serde(default)]
pub(crate) volumes: HashMap<String, DockerComposeVolume>,
}
@@ -355,6 +405,77 @@ pub(crate) trait DockerClient {
fn docker_cli(&self) -> String;
}
+fn deserialize_labels<'de, D>(deserializer: D) -> Result<Option<HashMap<String, String>>, D::Error>
+where
+ D: Deserializer<'de>,
+{
+ struct LabelsVisitor;
+
+ impl<'de> de::Visitor<'de> for LabelsVisitor {
+ type Value = Option<HashMap<String, String>>;
+
+ fn expecting(&self, formatter: &mut std::fmt::Formatter) -> std::fmt::Result {
+ formatter.write_str("a sequence of strings or a map of string key-value pairs")
+ }
+
+ fn visit_seq<A>(self, seq: A) -> Result<Self::Value, A::Error>
+ where
+ A: de::SeqAccess<'de>,
+ {
+ let values = Vec::<String>::deserialize(de::value::SeqAccessDeserializer::new(seq))?;
+
+ Ok(Some(
+ values
+ .iter()
+ .filter_map(|v| {
+ let (key, value) = v.split_once('=')?;
+ Some((key.to_string(), value.to_string()))
+ })
+ .collect(),
+ ))
+ }
+
+ fn visit_map<M>(self, map: M) -> Result<Self::Value, M::Error>
+ where
+ M: de::MapAccess<'de>,
+ {
+ HashMap::<String, String>::deserialize(de::value::MapAccessDeserializer::new(map))
+ .map(|v| Some(v))
+ }
+
+ fn visit_none<E>(self) -> Result<Self::Value, E>
+ where
+ E: de::Error,
+ {
+ Ok(None)
+ }
+
+ fn visit_unit<E>(self) -> Result<Self::Value, E>
+ where
+ E: de::Error,
+ {
+ Ok(None)
+ }
+ }
+
+ deserializer.deserialize_any(LabelsVisitor)
+}
+
+fn deserialize_nullable_vec<'de, D, T>(deserializer: D) -> Result<Vec<T>, D::Error>
+where
+ D: Deserializer<'de>,
+ T: Deserialize<'de>,
+{
+ Option::<Vec<T>>::deserialize(deserializer).map(|opt| opt.unwrap_or_default())
+}
+
+fn deserialize_nullable_labels<'de, D>(deserializer: D) -> Result<DockerConfigLabels, D::Error>
+where
+ D: Deserializer<'de>,
+{
+ Option::<DockerConfigLabels>::deserialize(deserializer).map(|opt| opt.unwrap_or_default())
+}
+
fn deserialize_metadata<'de, D>(
deserializer: D,
) -> Result<Option<Vec<HashMap<String, serde_json_lenient::Value>>>, D::Error>
@@ -417,11 +538,51 @@ mod test {
command_json::deserialize_json_output,
devcontainer_json::MountDefinition,
docker::{
- Docker, DockerComposeConfig, DockerComposeService, DockerComposeVolume, DockerInspect,
- DockerPs, get_remote_dir_from_config,
+ Docker, DockerComposeConfig, DockerComposeService, DockerComposeServicePort,
+ DockerComposeVolume, DockerInspect, DockerPs, get_remote_dir_from_config,
},
};
+ #[test]
+ fn should_parse_simple_env_var() {
+ let config = super::DockerInspectConfig {
+ labels: super::DockerConfigLabels { metadata: None },
+ image_user: None,
+ env: vec!["KEY=value".to_string()],
+ };
+
+ let map = config.env_as_map().unwrap();
+ assert_eq!(map.get("KEY").unwrap(), "value");
+ }
+
+ #[test]
+ fn should_parse_env_var_with_equals_in_value() {
+ let config = super::DockerInspectConfig {
+ labels: super::DockerConfigLabels { metadata: None },
+ image_user: None,
+ env: vec!["COMPLEX=key=val other>=1.0".to_string()],
+ };
+
+ let map = config.env_as_map().unwrap();
+ assert_eq!(map.get("COMPLEX").unwrap(), "key=val other>=1.0");
+ }
+
+ #[test]
+ fn should_parse_simple_label() {
+ let json = r#"{"volumes": [], "labels": ["com.example.key=value"]}"#;
+ let service: DockerComposeService = serde_json_lenient::from_str(json).unwrap();
+ let labels = service.labels.unwrap();
+ assert_eq!(labels.get("com.example.key").unwrap(), "value");
+ }
+
+ #[test]
+ fn should_parse_label_with_equals_in_value() {
+ let json = r#"{"volumes": [], "labels": ["com.example.key=value=with=equals"]}"#;
+ let service: DockerComposeService = serde_json_lenient::from_str(json).unwrap();
+ let labels = service.labels.unwrap();
+ assert_eq!(labels.get("com.example.key").unwrap(), "value=with=equals");
+ }
+
#[test]
fn should_create_docker_inspect_command() {
let docker = Docker::new("docker");
@@ -805,6 +966,22 @@ mod test {
"POSTGRES_PORT": "5432",
"POSTGRES_USER": "postgres"
},
+ "ports": [
+ {
+ "target": "5443",
+ "published": "5442"
+ },
+ {
+ "name": "custom port",
+ "protocol": "udp",
+ "host_ip": "127.0.0.1",
+ "app_protocol": "http",
+ "mode": "host",
+ "target": "8081",
+ "published": "8083"
+
+ }
+ ],
"image": "mcr.microsoft.com/devcontainers/rust:2-1-bookworm",
"network_mode": "service:db",
"volumes": [
@@ -860,15 +1037,33 @@ mod test {
(
"app".to_string(),
DockerComposeService {
+ command: vec!["sleep".to_string(), "infinity".to_string()],
image: Some(
"mcr.microsoft.com/devcontainers/rust:2-1-bookworm".to_string(),
),
volumes: vec![MountDefinition {
mount_type: Some("bind".to_string()),
- source: "/path/to".to_string(),
+ source: Some("/path/to".to_string()),
target: "/workspaces".to_string(),
}],
network_mode: Some("service:db".to_string()),
+
+ ports: vec![
+ DockerComposeServicePort {
+ target: "5443".to_string(),
+ published: "5442".to_string(),
+ ..Default::default()
+ },
+ DockerComposeServicePort {
+ target: "8081".to_string(),
+ published: "8083".to_string(),
+ mode: Some("host".to_string()),
+ protocol: Some("udp".to_string()),
+ host_ip: Some("127.0.0.1".to_string()),
+ app_protocol: Some("http".to_string()),
+ name: Some("custom port".to_string()),
+ },
+ ],
..Default::default()
},
),
@@ -878,7 +1073,7 @@ mod test {
image: Some("postgres:14.1".to_string()),
volumes: vec![MountDefinition {
mount_type: Some("volume".to_string()),
- source: "postgres-data".to_string(),
+ source: Some("postgres-data".to_string()),
target: "/var/lib/postgresql/data".to_string(),
}],
..Default::default()
@@ -895,4 +1090,175 @@ mod test {
assert_eq!(docker_compose_config, expected_config);
}
+
+ #[test]
+ fn should_deserialize_compose_labels_as_map() {
+ let given_config = r#"
+ {
+ "name": "devcontainer",
+ "services": {
+ "app": {
+ "image": "node:22-alpine",
+ "volumes": [],
+ "labels": {
+ "com.example.test": "value",
+ "another.label": "another-value"
+ }
+ }
+ }
+ }
+ "#;
+
+ let config: DockerComposeConfig = serde_json_lenient::from_str(given_config).unwrap();
+ let service = config.services.get("app").unwrap();
+ let labels = service.labels.clone().unwrap();
+ assert_eq!(
+ labels,
+ HashMap::from([
+ ("another.label".to_string(), "another-value".to_string()),
+ ("com.example.test".to_string(), "value".to_string())
+ ])
+ );
+ }
+
+ #[test]
+ fn should_deserialize_compose_labels_as_array() {
+ let given_config = r#"
+ {
+ "name": "devcontainer",
+ "services": {
+ "app": {
+ "image": "node:22-alpine",
+ "volumes": [],
+ "labels": ["com.example.test=value"]
+ }
+ }
+ }
+ "#;
+
+ let config: DockerComposeConfig = serde_json_lenient::from_str(given_config).unwrap();
+ let service = config.services.get("app").unwrap();
+ assert_eq!(
+ service.labels,
+ Some(HashMap::from([(
+ "com.example.test".to_string(),
+ "value".to_string()
+ )]))
+ );
+ }
+
+ #[test]
+ fn should_deserialize_compose_without_volumes() {
+ let given_config = r#"
+ {
+ "name": "devcontainer",
+ "services": {
+ "app": {
+ "image": "node:22-alpine",
+ "volumes": []
+ }
+ }
+ }
+ "#;
+
+ let config: DockerComposeConfig = serde_json_lenient::from_str(given_config).unwrap();
+ assert!(config.volumes.is_empty());
+ }
+
+ #[test]
+ fn should_deserialize_compose_with_missing_volumes_field() {
+ let given_config = r#"
+ {
+ "name": "devcontainer",
+ "services": {
+ "sidecar": {
+ "image": "ubuntu:24.04"
+ }
+ }
+ }
+ "#;
+
+ let config: DockerComposeConfig = serde_json_lenient::from_str(given_config).unwrap();
+ let service = config.services.get("sidecar").unwrap();
+ assert!(service.volumes.is_empty());
+ }
+
+ #[test]
+ fn should_deserialize_compose_volume_without_source() {
+ let given_config = r#"
+ {
+ "name": "devcontainer",
+ "services": {
+ "app": {
+ "image": "ubuntu:24.04",
+ "volumes": [
+ {
+ "type": "tmpfs",
+ "target": "/tmp"
+ }
+ ]
+ }
+ }
+ }
+ "#;
+
+ let config: DockerComposeConfig = serde_json_lenient::from_str(given_config).unwrap();
+ let service = config.services.get("app").unwrap();
+ assert_eq!(service.volumes.len(), 1);
+ assert_eq!(service.volumes[0].source, None);
+ assert_eq!(service.volumes[0].target, "/tmp");
+ assert_eq!(service.volumes[0].mount_type, Some("tmpfs".to_string()));
+ }
+
+ #[test]
+ fn should_deserialize_inspect_without_labels() {
+ let given_config = r#"
+ {
+ "Id": "sha256:abc123",
+ "Config": {
+ "Env": ["PATH=/usr/bin"],
+ "Cmd": ["node"],
+ "WorkingDir": "/"
+ }
+ }
+ "#;
+
+ let inspect: DockerInspect = serde_json_lenient::from_str(given_config).unwrap();
+ assert!(inspect.config.labels.metadata.is_none());
+ assert!(inspect.config.image_user.is_none());
+ }
+
+ #[test]
+ fn should_deserialize_inspect_with_null_labels() {
+ let given_config = r#"
+ {
+ "Id": "sha256:abc123",
+ "Config": {
+ "Labels": null,
+ "Env": ["PATH=/usr/bin"]
+ }
+ }
+ "#;
+
+ let inspect: DockerInspect = serde_json_lenient::from_str(given_config).unwrap();
+ assert!(inspect.config.labels.metadata.is_none());
+ }
+
+ #[test]
+ fn should_deserialize_inspect_with_labels_but_no_metadata() {
+ let given_config = r#"
+ {
+ "Id": "sha256:abc123",
+ "Config": {
+ "Labels": {
+ "com.example.test": "value"
+ },
+ "Env": ["PATH=/usr/bin"]
+ }
+ }
+ "#;
+
+ let inspect: DockerInspect = serde_json_lenient::from_str(given_config).unwrap();
+ assert!(inspect.config.labels.metadata.is_none());
+ }
}
@@ -24,6 +24,7 @@ use settings::Settings;
use std::{
any::{Any, TypeId},
cmp::{self, Ordering},
+ ops::Range,
sync::Arc,
};
use text::{Anchor, BufferSnapshot, OffsetRangeExt};
@@ -480,25 +481,35 @@ impl BufferDiagnosticsEditor {
})
});
- let (anchor_ranges, _) =
- buffer_diagnostics_editor
- .multibuffer
- .update(cx, |multibuffer, cx| {
- let excerpt_ranges = excerpt_ranges
- .into_iter()
- .map(|range| ExcerptRange {
- context: range.context.to_point(&buffer_snapshot),
- primary: range.primary.to_point(&buffer_snapshot),
- })
- .collect();
- multibuffer.set_excerpt_ranges_for_path(
- PathKey::for_buffer(&buffer, cx),
- buffer.clone(),
- &buffer_snapshot,
- excerpt_ranges,
- cx,
- )
- });
+ let excerpt_ranges: Vec<_> = excerpt_ranges
+ .into_iter()
+ .map(|range| ExcerptRange {
+ context: range.context.to_point(&buffer_snapshot),
+ primary: range.primary.to_point(&buffer_snapshot),
+ })
+ .collect();
+ buffer_diagnostics_editor
+ .multibuffer
+ .update(cx, |multibuffer, cx| {
+ multibuffer.set_excerpt_ranges_for_path(
+ PathKey::for_buffer(&buffer, cx),
+ buffer.clone(),
+ &buffer_snapshot,
+ excerpt_ranges.clone(),
+ cx,
+ )
+ });
+ let multibuffer_snapshot =
+ buffer_diagnostics_editor.multibuffer.read(cx).snapshot(cx);
+ let anchor_ranges: Vec<Range<editor::Anchor>> = excerpt_ranges
+ .into_iter()
+ .filter_map(|range| {
+ let text_range = buffer_snapshot.anchor_range_inside(range.primary);
+ let start = multibuffer_snapshot.anchor_in_buffer(text_range.start)?;
+ let end = multibuffer_snapshot.anchor_in_buffer(text_range.end)?;
+ Some(start..end)
+ })
+ .collect();
if was_empty {
if let Some(anchor_range) = anchor_ranges.first() {
@@ -10,7 +10,7 @@ use language::{BufferId, Diagnostic, DiagnosticEntryRef, LanguageRegistry};
use lsp::DiagnosticSeverity;
use markdown::{CopyButtonVisibility, Markdown, MarkdownElement};
use settings::Settings;
-use text::{AnchorRangeExt, Point};
+use text::Point;
use theme_settings::ThemeSettings;
use ui::{CopyButton, prelude::*};
use util::maybe;
@@ -289,23 +289,12 @@ impl DiagnosticBlock {
.nth(ix)
{
let multibuffer = editor.buffer().read(cx);
- let Some(snapshot) = multibuffer
- .buffer(buffer_id)
- .map(|entity| entity.read(cx).snapshot())
- else {
+ if let Some(anchor_range) = multibuffer
+ .snapshot(cx)
+ .buffer_anchor_range_to_anchor_range(diagnostic.range)
+ {
+ Self::jump_to(editor, anchor_range, window, cx);
return;
- };
-
- for (excerpt_id, _, range) in multibuffer.excerpts_for_buffer(buffer_id, cx) {
- if range.context.overlaps(&diagnostic.range, &snapshot) {
- Self::jump_to(
- editor,
- Anchor::range_in_buffer(excerpt_id, diagnostic.range),
- window,
- cx,
- );
- return;
- }
}
}
} else if let Some(diagnostic) = editor
@@ -12,7 +12,7 @@ use buffer_diagnostics::BufferDiagnosticsEditor;
use collections::{BTreeSet, HashMap, HashSet};
use diagnostic_renderer::DiagnosticBlock;
use editor::{
- Editor, EditorEvent, ExcerptRange, MultiBuffer, PathKey,
+ Anchor, Editor, EditorEvent, ExcerptRange, MultiBuffer, PathKey,
display_map::{BlockPlacement, BlockProperties, BlockStyle, CustomBlockId},
multibuffer_context_lines,
};
@@ -301,17 +301,21 @@ impl ProjectDiagnosticsEditor {
let snapshot = self
.editor
.update(cx, |editor, cx| editor.display_snapshot(cx));
- let buffer = self.multibuffer.read(cx);
- let buffer_ids = buffer.all_buffer_ids();
let selected_buffers = self.editor.update(cx, |editor, _| {
editor
.selections
.all_anchors(&snapshot)
.iter()
- .filter_map(|anchor| anchor.start.text_anchor.buffer_id)
+ .filter_map(|anchor| {
+ Some(snapshot.anchor_to_buffer_anchor(anchor.start)?.0.buffer_id)
+ })
.collect::<HashSet<_>>()
});
- for buffer_id in buffer_ids {
+ for buffer_id in snapshot
+ .excerpts()
+ .map(|excerpt| excerpt.context.start.buffer_id)
+ .dedup()
+ {
if retain_selections && selected_buffers.contains(&buffer_id) {
continue;
}
@@ -329,7 +333,7 @@ impl ProjectDiagnosticsEditor {
continue;
}
self.multibuffer.update(cx, |b, cx| {
- b.remove_excerpts_for_path(PathKey::for_buffer(&buffer, cx), cx);
+ b.remove_excerpts(PathKey::for_buffer(&buffer, cx), cx);
});
}
}
@@ -581,9 +585,8 @@ impl ProjectDiagnosticsEditor {
match retain_excerpts {
RetainExcerpts::Dirty if !is_dirty => Vec::new(),
RetainExcerpts::All | RetainExcerpts::Dirty => multi_buffer
- .excerpts_for_buffer(buffer_id, cx)
- .into_iter()
- .map(|(_, _, range)| range)
+ .snapshot(cx)
+ .excerpts_for_buffer(buffer_id)
.sorted_by(|a, b| cmp_excerpts(&buffer_snapshot, a, b))
.collect(),
}
@@ -621,22 +624,34 @@ impl ProjectDiagnosticsEditor {
});
})
}
- let (anchor_ranges, _) = this.multibuffer.update(cx, |multi_buffer, cx| {
- let excerpt_ranges = excerpt_ranges
- .into_iter()
- .map(|range| ExcerptRange {
- context: range.context.to_point(&buffer_snapshot),
- primary: range.primary.to_point(&buffer_snapshot),
- })
- .collect();
+ let buffer_snapshot = buffer.read(cx).snapshot();
+ let excerpt_ranges: Vec<_> = excerpt_ranges
+ .into_iter()
+ .map(|range| ExcerptRange {
+ context: range.context.to_point(&buffer_snapshot),
+ primary: range.primary.to_point(&buffer_snapshot),
+ })
+ .collect();
+ // TODO(cole): maybe should use the nonshrinking API?
+ this.multibuffer.update(cx, |multi_buffer, cx| {
multi_buffer.set_excerpt_ranges_for_path(
PathKey::for_buffer(&buffer, cx),
buffer.clone(),
&buffer_snapshot,
- excerpt_ranges,
+ excerpt_ranges.clone(),
cx,
)
});
+ let multibuffer_snapshot = this.multibuffer.read(cx).snapshot(cx);
+ let anchor_ranges: Vec<Range<Anchor>> = excerpt_ranges
+ .into_iter()
+ .filter_map(|range| {
+ let text_range = buffer_snapshot.anchor_range_inside(range.primary);
+ let start = multibuffer_snapshot.anchor_in_buffer(text_range.start)?;
+ let end = multibuffer_snapshot.anchor_in_buffer(text_range.end)?;
+ Some(start..end)
+ })
+ .collect();
#[cfg(test)]
let cloned_blocks = result_blocks.clone();
@@ -26,6 +26,7 @@ cloud_llm_client.workspace = true
collections.workspace = true
copilot.workspace = true
copilot_ui.workspace = true
+credentials_provider.workspace = true
db.workspace = true
edit_prediction_types.workspace = true
edit_prediction_context.workspace = true
@@ -65,6 +66,7 @@ uuid.workspace = true
workspace.workspace = true
worktree.workspace = true
zed_actions.workspace = true
+zed_credentials_provider.workspace = true
zeta_prompt.workspace = true
zstd.workspace = true
@@ -258,6 +258,7 @@ fn generate_timestamp_name() -> String {
mod tests {
use super::*;
use crate::EditPredictionStore;
+ use client::RefreshLlmTokenListener;
use client::{Client, UserStore};
use clock::FakeSystemClock;
use gpui::{AppContext as _, TestAppContext, http_client::FakeHttpClient};
@@ -414,7 +415,7 @@ mod tests {
capture_example(
project.clone(),
buffer.clone(),
- Anchor::MIN,
+ Anchor::min_for_buffer(buffer.read(cx).remote_id()),
events,
true,
cx,
@@ -548,7 +549,8 @@ mod tests {
let http_client = FakeHttpClient::with_404_response();
let client = Client::new(Arc::new(FakeSystemClock::new()), http_client, cx);
let user_store = cx.new(|cx| UserStore::new(client.clone(), cx));
- language_model::init(user_store.clone(), client.clone(), cx);
+ language_model::init(cx);
+ RefreshLlmTokenListener::register(client.clone(), user_store.clone(), cx);
EditPredictionStore::global(&client, &user_store, cx);
})
}
@@ -1,5 +1,5 @@
use anyhow::Result;
-use client::{Client, EditPredictionUsage, UserStore};
+use client::{Client, EditPredictionUsage, NeedsLlmTokenRefresh, UserStore, global_llm_token};
use cloud_api_types::{OrganizationId, SubmitEditPredictionFeedbackBody};
use cloud_llm_client::predict_edits_v3::{
PredictEditsV3Request, PredictEditsV3Response, RawCompletionRequest, RawCompletionResponse,
@@ -11,6 +11,7 @@ use cloud_llm_client::{
};
use collections::{HashMap, HashSet};
use copilot::{Copilot, Reinstall, SignIn, SignOut};
+use credentials_provider::CredentialsProvider;
use db::kvp::{Dismissable, KeyValueStore};
use edit_prediction_context::{RelatedExcerptStore, RelatedExcerptStoreEvent, RelatedFile};
use feature_flags::{FeatureFlag, FeatureFlagAppExt as _};
@@ -30,7 +31,7 @@ use heapless::Vec as ArrayVec;
use language::language_settings::all_language_settings;
use language::{Anchor, Buffer, File, Point, TextBufferSnapshot, ToOffset, ToPoint};
use language::{BufferSnapshot, OffsetRangeExt};
-use language_model::{LlmApiToken, NeedsLlmTokenRefresh};
+use language_model::LlmApiToken;
use project::{DisableAiSettings, Project, ProjectPath, WorktreeId};
use release_channel::AppVersion;
use semver::Version;
@@ -150,6 +151,7 @@ pub struct EditPredictionStore {
rated_predictions: HashSet<EditPredictionId>,
#[cfg(test)]
settled_event_callback: Option<Box<dyn Fn(EditPredictionId, String)>>,
+ credentials_provider: Arc<dyn CredentialsProvider>,
}
pub(crate) struct EditPredictionRejectionPayload {
@@ -746,7 +748,7 @@ impl EditPredictionStore {
pub fn new(client: Arc<Client>, user_store: Entity<UserStore>, cx: &mut Context<Self>) -> Self {
let data_collection_choice = Self::load_data_collection_choice(cx);
- let llm_token = LlmApiToken::global(cx);
+ let llm_token = global_llm_token(cx);
let (reject_tx, reject_rx) = mpsc::unbounded();
cx.background_spawn({
@@ -787,6 +789,8 @@ impl EditPredictionStore {
.log_err();
});
+ let credentials_provider = zed_credentials_provider::global(cx);
+
let this = Self {
projects: HashMap::default(),
client,
@@ -807,6 +811,8 @@ impl EditPredictionStore {
shown_predictions: Default::default(),
#[cfg(test)]
settled_event_callback: None,
+
+ credentials_provider,
};
this
@@ -871,7 +877,9 @@ impl EditPredictionStore {
let experiments = cx
.background_spawn(async move {
let http_client = client.http_client();
- let token = llm_token.acquire(&client, organization_id).await?;
+ let token = client
+ .acquire_llm_token(&llm_token, organization_id.clone())
+ .await?;
let url = http_client.build_zed_llm_url("/edit_prediction_experiments", &[])?;
let request = http_client::Request::builder()
.method(Method::GET)
@@ -1676,7 +1684,7 @@ impl EditPredictionStore {
buffer.pending_predictions.push(PendingSettledPrediction {
request_id: request_id,
editable_anchor_range: edited_buffer_snapshot
- .anchor_range_around(editable_offset_range),
+ .anchor_range_inside(editable_offset_range),
example,
e2e_latency,
enqueued_at: now,
@@ -2315,7 +2323,10 @@ impl EditPredictionStore {
zeta::request_prediction_with_zeta(self, inputs, capture_data, cx)
}
EditPredictionModel::Fim { format } => fim::request_prediction(inputs, format, cx),
- EditPredictionModel::Mercury => self.mercury.request_prediction(inputs, cx),
+ EditPredictionModel::Mercury => {
+ self.mercury
+ .request_prediction(inputs, self.credentials_provider.clone(), cx)
+ }
};
cx.spawn(async move |this, cx| {
@@ -2351,7 +2362,10 @@ impl EditPredictionStore {
cx: &mut AsyncApp,
) -> Result<Option<(Entity<Buffer>, language::Anchor)>> {
let collaborator_cursor_rows: Vec<u32> = active_buffer_snapshot
- .selections_in_range(Anchor::MIN..Anchor::MAX, false)
+ .selections_in_range(
+ Anchor::min_max_range_for_buffer(active_buffer_snapshot.remote_id()),
+ false,
+ )
.flat_map(|(_, _, _, selections)| {
selections.map(|s| s.head().to_point(active_buffer_snapshot).row)
})
@@ -2427,7 +2441,10 @@ impl EditPredictionStore {
candidate_buffer.read_with(cx, |buffer, _cx| {
let snapshot = buffer.snapshot();
let has_collaborators = snapshot
- .selections_in_range(Anchor::MIN..Anchor::MAX, false)
+ .selections_in_range(
+ Anchor::min_max_range_for_buffer(snapshot.remote_id()),
+ false,
+ )
.next()
.is_some();
let position = buffer
@@ -2530,12 +2547,15 @@ impl EditPredictionStore {
Res: DeserializeOwned,
{
let http_client = client.http_client();
-
let mut token = if require_auth {
- Some(llm_token.acquire(&client, organization_id.clone()).await?)
+ Some(
+ client
+ .acquire_llm_token(&llm_token, organization_id.clone())
+ .await?,
+ )
} else {
- llm_token
- .acquire(&client, organization_id.clone())
+ client
+ .acquire_llm_token(&llm_token, organization_id.clone())
.await
.ok()
};
@@ -2579,7 +2599,11 @@ impl EditPredictionStore {
return Ok((serde_json::from_slice(&body)?, usage));
} else if !did_retry && token.is_some() && response.needs_llm_token_refresh() {
did_retry = true;
- token = Some(llm_token.refresh(&client, organization_id.clone()).await?);
+ token = Some(
+ client
+ .refresh_llm_token(&llm_token, organization_id.clone())
+ .await?,
+ );
} else {
let mut body = String::new();
response.body_mut().read_to_string(&mut body).await?;
@@ -2761,7 +2785,7 @@ fn collaborator_edit_overlaps_locality_region(
(position..position).to_point(snapshot),
COLLABORATOR_EDIT_LOCALITY_CONTEXT_TOKENS,
);
- let locality_anchor_range = snapshot.anchor_range_around(locality_point_range);
+ let locality_anchor_range = snapshot.anchor_range_inside(locality_point_range);
edit_range.overlaps(&locality_anchor_range, snapshot)
}
@@ -1,6 +1,6 @@
use super::*;
use crate::udiff::apply_diff_to_string;
-use client::{UserStore, test::FakeServer};
+use client::{RefreshLlmTokenListener, UserStore, test::FakeServer};
use clock::FakeSystemClock;
use clock::ReplicaId;
use cloud_api_types::{CreateLlmTokenResponse, LlmToken};
@@ -23,7 +23,7 @@ use language::{
Anchor, Buffer, Capability, CursorShape, Diagnostic, DiagnosticEntry, DiagnosticSet,
DiagnosticSeverity, Operation, Point, Selection, SelectionGoal,
};
-use language_model::RefreshLlmTokenListener;
+
use lsp::LanguageServerId;
use parking_lot::Mutex;
use pretty_assertions::{assert_eq, assert_matches};
@@ -2439,7 +2439,8 @@ fn init_test_with_fake_client(
client.cloud_client().set_credentials(1, "test".into());
let user_store = cx.new(|cx| UserStore::new(client.clone(), cx));
- language_model::init(user_store.clone(), client.clone(), cx);
+ language_model::init(cx);
+ RefreshLlmTokenListener::register(client.clone(), user_store.clone(), cx);
let ep_store = EditPredictionStore::global(&client, &user_store, cx);
(
@@ -2706,6 +2707,65 @@ async fn test_edit_prediction_no_spurious_trailing_newline(cx: &mut TestAppConte
});
}
+#[gpui::test]
+async fn test_v3_prediction_strips_cursor_marker_from_edit_text(cx: &mut TestAppContext) {
+ let (ep_store, mut requests) = init_test_with_fake_client(cx);
+ let fs = FakeFs::new(cx.executor());
+
+ fs.insert_tree(
+ "/root",
+ json!({
+ "foo.txt": "hello"
+ }),
+ )
+ .await;
+ let project = Project::test(fs, vec![path!("/root").as_ref()], cx).await;
+
+ let buffer = project
+ .update(cx, |project, cx| {
+ let path = project
+ .find_project_path(path!("root/foo.txt"), cx)
+ .unwrap();
+ project.open_buffer(path, cx)
+ })
+ .await
+ .unwrap();
+
+ let snapshot = buffer.read_with(cx, |buffer, _cx| buffer.snapshot());
+ let position = snapshot.anchor_before(language::Point::new(0, 5));
+
+ ep_store.update(cx, |ep_store, cx| {
+ ep_store.refresh_prediction_from_buffer(project.clone(), buffer.clone(), position, cx);
+ });
+
+ let (request, respond_tx) = requests.predict.next().await.unwrap();
+ let excerpt_length = request.input.cursor_excerpt.len();
+ respond_tx
+ .send(PredictEditsV3Response {
+ request_id: Uuid::new_v4().to_string(),
+ output: "hello<|user_cursor|> world".to_string(),
+ editable_range: 0..excerpt_length,
+ model_version: None,
+ })
+ .unwrap();
+
+ cx.run_until_parked();
+
+ ep_store.update(cx, |ep_store, cx| {
+ let prediction = ep_store
+ .prediction_at(&buffer, None, &project, cx)
+ .expect("should have prediction");
+ let snapshot = buffer.read(cx).snapshot();
+ let edits: Vec<_> = prediction
+ .edits
+ .iter()
+ .map(|(range, text)| (range.to_offset(&snapshot), text.clone()))
+ .collect();
+
+ assert_eq!(edits, vec![(5..5, " world".into())]);
+ });
+}
+
fn init_test(cx: &mut TestAppContext) {
cx.update(|cx| {
let settings_store = SettingsStore::test(cx);
@@ -2891,7 +2951,7 @@ async fn test_unauthenticated_without_custom_url_blocks_prediction_impl(cx: &mut
cx.update(|cx| client::Client::new(Arc::new(FakeSystemClock::new()), http_client, cx));
let user_store = cx.update(|cx| cx.new(|cx| client::UserStore::new(client.clone(), cx)));
cx.update(|cx| {
- language_model::RefreshLlmTokenListener::register(client.clone(), user_store.clone(), cx);
+ RefreshLlmTokenListener::register(client.clone(), user_store.clone(), cx);
});
let ep_store = cx.new(|cx| EditPredictionStore::new(client, project.read(cx).user_store(), cx));
@@ -1,10 +1,11 @@
-use crate::udiff::DiffLine;
use anyhow::{Context as _, Result};
use serde::{Deserialize, Serialize};
use std::{borrow::Cow, fmt::Write as _, mem, path::Path, sync::Arc};
use telemetry_events::EditPredictionRating;
-pub const CURSOR_POSITION_MARKER: &str = "[CURSOR_POSITION]";
+pub use zeta_prompt::udiff::{
+ CURSOR_POSITION_MARKER, encode_cursor_in_patch, extract_cursor_from_patch,
+};
pub const INLINE_CURSOR_MARKER: &str = "<|user_cursor|>";
/// Maximum cursor file size to capture (64KB).
@@ -12,64 +13,6 @@ pub const INLINE_CURSOR_MARKER: &str = "<|user_cursor|>";
/// falling back to git-based loading.
pub const MAX_CURSOR_FILE_SIZE: usize = 64 * 1024;
-/// Encodes a cursor position into a diff patch by adding a comment line with a caret
-/// pointing to the cursor column.
-///
-/// The cursor offset is relative to the start of the new text content (additions and context lines).
-/// Returns the patch with cursor marker comment lines inserted after the relevant addition line.
-pub fn encode_cursor_in_patch(patch: &str, cursor_offset: Option<usize>) -> String {
- let Some(cursor_offset) = cursor_offset else {
- return patch.to_string();
- };
-
- let mut result = String::new();
- let mut line_start_offset = 0usize;
-
- for line in patch.lines() {
- if matches!(
- DiffLine::parse(line),
- DiffLine::Garbage(content)
- if content.starts_with('#') && content.contains(CURSOR_POSITION_MARKER)
- ) {
- continue;
- }
-
- if !result.is_empty() {
- result.push('\n');
- }
- result.push_str(line);
-
- match DiffLine::parse(line) {
- DiffLine::Addition(content) => {
- let line_end_offset = line_start_offset + content.len();
-
- if cursor_offset >= line_start_offset && cursor_offset <= line_end_offset {
- let cursor_column = cursor_offset - line_start_offset;
-
- result.push('\n');
- result.push('#');
- for _ in 0..cursor_column {
- result.push(' ');
- }
- write!(result, "^{}", CURSOR_POSITION_MARKER).unwrap();
- }
-
- line_start_offset = line_end_offset + 1;
- }
- DiffLine::Context(content) => {
- line_start_offset += content.len() + 1;
- }
- _ => {}
- }
- }
-
- if patch.ends_with('\n') {
- result.push('\n');
- }
-
- result
-}
-
#[derive(Clone, Debug, PartialEq, Hash, Serialize, Deserialize)]
pub struct ExampleSpec {
#[serde(default)]
@@ -509,53 +452,7 @@ impl ExampleSpec {
pub fn expected_patches_with_cursor_positions(&self) -> Vec<(String, Option<usize>)> {
self.expected_patches
.iter()
- .map(|patch| {
- let mut clean_patch = String::new();
- let mut cursor_offset: Option<usize> = None;
- let mut line_start_offset = 0usize;
- let mut prev_line_start_offset = 0usize;
-
- for line in patch.lines() {
- let diff_line = DiffLine::parse(line);
-
- match &diff_line {
- DiffLine::Garbage(content)
- if content.starts_with('#')
- && content.contains(CURSOR_POSITION_MARKER) =>
- {
- let caret_column = if let Some(caret_pos) = content.find('^') {
- caret_pos
- } else if let Some(_) = content.find('<') {
- 0
- } else {
- continue;
- };
- let cursor_column = caret_column.saturating_sub('#'.len_utf8());
- cursor_offset = Some(prev_line_start_offset + cursor_column);
- }
- _ => {
- if !clean_patch.is_empty() {
- clean_patch.push('\n');
- }
- clean_patch.push_str(line);
-
- match diff_line {
- DiffLine::Addition(content) | DiffLine::Context(content) => {
- prev_line_start_offset = line_start_offset;
- line_start_offset += content.len() + 1;
- }
- _ => {}
- }
- }
- }
- }
-
- if patch.ends_with('\n') && !clean_patch.is_empty() {
- clean_patch.push('\n');
- }
-
- (clean_patch, cursor_offset)
- })
+ .map(|patch| extract_cursor_from_patch(patch))
.collect()
}
@@ -21,14 +21,23 @@ use worktree::ChildEntriesOptions;
static LICENSE_FILE_NAME_REGEX: LazyLock<regex::bytes::Regex> = LazyLock::new(|| {
regex::bytes::RegexBuilder::new(
"^ \
- (?: license | licence)? \
- (?: [\\-._]? \
+ (?: \
+ (?: license | licence) \
+ (?: [\\-._]? \
+ (?: apache (?: [\\-._] (?: 2.0 | 2 ))? | \
+ 0? bsd (?: [\\-._] [0123])? (?: [\\-._] clause)? | \
+ isc | \
+ mit | \
+ upl | \
+ zlib))? \
+ | \
(?: apache (?: [\\-._] (?: 2.0 | 2 ))? | \
0? bsd (?: [\\-._] [0123])? (?: [\\-._] clause)? | \
isc | \
mit | \
upl | \
- zlib))? \
+ zlib) \
+ ) \
(?: [\\-._]? (?: license | licence))? \
(?: \\.txt | \\.md)? \
$",
@@ -310,6 +319,7 @@ impl LicenseDetectionWatcher {
}
worktree::Event::DeletedEntry(_)
| worktree::Event::UpdatedGitRepositories(_)
+ | worktree::Event::UpdatedRootRepoCommonDir
| worktree::Event::Deleted => {}
});
@@ -350,6 +360,9 @@ impl LicenseDetectionWatcher {
return None;
};
let metadata = fs.metadata(&abs_path).await.log_err()??;
+ if metadata.is_dir {
+ return None;
+ }
if metadata.len > LICENSE_PATTERNS.approximate_max_length as u64 {
log::debug!(
"`{abs_path:?}` license file was skipped \
@@ -697,6 +710,7 @@ mod tests {
assert!(LICENSE_FILE_NAME_REGEX.is_match(b"licence-upl.txt"));
// Test non-matching patterns
+ assert!(!LICENSE_FILE_NAME_REGEX.is_match(b""));
assert!(!LICENSE_FILE_NAME_REGEX.is_match(b"COPYING"));
assert!(!LICENSE_FILE_NAME_REGEX.is_match(b"LICENSE.html"));
assert!(!LICENSE_FILE_NAME_REGEX.is_match(b"MYLICENSE"));
@@ -5,6 +5,7 @@ use crate::{
};
use anyhow::{Context as _, Result};
use cloud_llm_client::EditPredictionRejectReason;
+use credentials_provider::CredentialsProvider;
use futures::AsyncReadExt as _;
use gpui::{
App, AppContext as _, Context, Entity, Global, SharedString, Task,
@@ -51,10 +52,11 @@ impl Mercury {
debug_tx,
..
}: EditPredictionModelInput,
+ credentials_provider: Arc<dyn CredentialsProvider>,
cx: &mut Context<EditPredictionStore>,
) -> Task<Result<Option<EditPredictionResult>>> {
self.api_token.update(cx, |key_state, cx| {
- _ = key_state.load_if_needed(MERCURY_CREDENTIALS_URL, |s| s, cx);
+ _ = key_state.load_if_needed(MERCURY_CREDENTIALS_URL, |s| s, credentials_provider, cx);
});
let Some(api_token) = self.api_token.read(cx).key(&MERCURY_CREDENTIALS_URL) else {
return Task::ready(Ok(None));
@@ -387,8 +389,9 @@ pub fn mercury_api_token(cx: &mut App) -> Entity<ApiKeyState> {
}
pub fn load_mercury_api_token(cx: &mut App) -> Task<Result<(), language_model::AuthenticateError>> {
+ let credentials_provider = zed_credentials_provider::global(cx);
mercury_api_token(cx).update(cx, |key_state, cx| {
- key_state.load_if_needed(MERCURY_CREDENTIALS_URL, |s| s, cx)
+ key_state.load_if_needed(MERCURY_CREDENTIALS_URL, |s| s, credentials_provider, cx)
})
}
@@ -42,9 +42,10 @@ pub fn open_ai_compatible_api_token(cx: &mut App) -> Entity<ApiKeyState> {
pub fn load_open_ai_compatible_api_token(
cx: &mut App,
) -> Task<Result<(), language_model::AuthenticateError>> {
+ let credentials_provider = zed_credentials_provider::global(cx);
let api_url = open_ai_compatible_api_url(cx);
open_ai_compatible_api_token(cx).update(cx, |key_state, cx| {
- key_state.load_if_needed(api_url, |s| s, cx)
+ key_state.load_if_needed(api_url, |s| s, credentials_provider, cx)
})
}
@@ -1,11 +1,4 @@
-use std::{
- borrow::Cow,
- fmt::{Debug, Display, Write},
- mem,
- ops::Range,
- path::{Path, PathBuf},
- sync::Arc,
-};
+use std::{mem, ops::Range, path::Path, path::PathBuf, sync::Arc};
use anyhow::{Context as _, Result, anyhow};
use collections::{HashMap, hash_map::Entry};
@@ -15,6 +8,14 @@ use postage::stream::Stream as _;
use project::Project;
use util::{paths::PathStyle, rel_path::RelPath};
use worktree::Worktree;
+use zeta_prompt::udiff::{
+ DiffEvent, DiffParser, FileStatus, Hunk, disambiguate_by_line_number, find_context_candidates,
+};
+
+pub use zeta_prompt::udiff::{
+ DiffLine, HunkLocation, apply_diff_to_string, apply_diff_to_string_with_hunk_offset,
+ strip_diff_metadata, strip_diff_path_prefix,
+};
#[derive(Clone, Debug)]
pub struct OpenedBuffers(HashMap<String, Entity<Buffer>>);
@@ -54,7 +55,6 @@ pub async fn apply_diff(
let mut included_files: HashMap<String, Entity<Buffer>> = HashMap::default();
- let ranges = [Anchor::MIN..Anchor::MAX];
let mut diff = DiffParser::new(diff_str);
let mut current_file = None;
let mut edits: Vec<(std::ops::Range<Anchor>, Arc<str>)> = vec![];
@@ -115,7 +115,7 @@ pub async fn apply_diff(
edits.extend(resolve_hunk_edits_in_buffer(
hunk,
buffer,
- ranges.as_slice(),
+ &[Anchor::min_max_range_for_buffer(buffer.remote_id())],
status,
)?);
anyhow::Ok(())
@@ -190,209 +190,6 @@ pub async fn refresh_worktree_entries(
Ok(())
}
-pub fn strip_diff_path_prefix<'a>(diff: &'a str, prefix: &str) -> Cow<'a, str> {
- if prefix.is_empty() {
- return Cow::Borrowed(diff);
- }
-
- let prefix_with_slash = format!("{}/", prefix);
- let mut needs_rewrite = false;
-
- for line in diff.lines() {
- match DiffLine::parse(line) {
- DiffLine::OldPath { path } | DiffLine::NewPath { path } => {
- if path.starts_with(&prefix_with_slash) {
- needs_rewrite = true;
- break;
- }
- }
- _ => {}
- }
- }
-
- if !needs_rewrite {
- return Cow::Borrowed(diff);
- }
-
- let mut result = String::with_capacity(diff.len());
- for line in diff.lines() {
- match DiffLine::parse(line) {
- DiffLine::OldPath { path } => {
- let stripped = path
- .strip_prefix(&prefix_with_slash)
- .unwrap_or(path.as_ref());
- result.push_str(&format!("--- a/{}\n", stripped));
- }
- DiffLine::NewPath { path } => {
- let stripped = path
- .strip_prefix(&prefix_with_slash)
- .unwrap_or(path.as_ref());
- result.push_str(&format!("+++ b/{}\n", stripped));
- }
- _ => {
- result.push_str(line);
- result.push('\n');
- }
- }
- }
-
- Cow::Owned(result)
-}
-/// Strip unnecessary git metadata lines from a diff, keeping only the lines
-/// needed for patch application: path headers (--- and +++), hunk headers (@@),
-/// and content lines (+, -, space).
-pub fn strip_diff_metadata(diff: &str) -> String {
- let mut result = String::new();
-
- for line in diff.lines() {
- let dominated = DiffLine::parse(line);
- match dominated {
- // Keep path headers, hunk headers, and content lines
- DiffLine::OldPath { .. }
- | DiffLine::NewPath { .. }
- | DiffLine::HunkHeader(_)
- | DiffLine::Context(_)
- | DiffLine::Deletion(_)
- | DiffLine::Addition(_)
- | DiffLine::NoNewlineAtEOF => {
- result.push_str(line);
- result.push('\n');
- }
- // Skip garbage lines (diff --git, index, etc.)
- DiffLine::Garbage(_) => {}
- }
- }
-
- result
-}
-
-/// Find all byte offsets where `hunk.context` occurs as a substring of `text`.
-///
-/// If no exact matches are found and the context ends with `'\n'` but `text`
-/// does not, retries without the trailing newline, accepting only a match at
-/// the very end of `text`. When this fallback fires, the hunk's context is
-/// trimmed and its edit ranges are clamped so that downstream code doesn't
-/// index past the end of the matched region. This handles diffs that are
-/// missing a `\ No newline at end of file` marker: the parser always appends
-/// `'\n'` via `writeln!`, so the context can have a trailing newline that
-/// doesn't exist in the source text.
-fn find_context_candidates(text: &str, hunk: &mut Hunk) -> Vec<usize> {
- let candidates: Vec<usize> = text
- .match_indices(&hunk.context)
- .map(|(offset, _)| offset)
- .collect();
-
- if !candidates.is_empty() {
- return candidates;
- }
-
- if hunk.context.ends_with('\n') && !hunk.context.is_empty() {
- let old_len = hunk.context.len();
- hunk.context.pop();
- let new_len = hunk.context.len();
-
- if !hunk.context.is_empty() {
- let candidates: Vec<usize> = text
- .match_indices(&hunk.context)
- .filter(|(offset, _)| offset + new_len == text.len())
- .map(|(offset, _)| offset)
- .collect();
-
- if !candidates.is_empty() {
- for edit in &mut hunk.edits {
- let touched_phantom = edit.range.end > new_len;
- edit.range.start = edit.range.start.min(new_len);
- edit.range.end = edit.range.end.min(new_len);
- if touched_phantom {
- // The replacement text was also written with a
- // trailing '\n' that corresponds to the phantom
- // newline we just removed from the context.
- if edit.text.ends_with('\n') {
- edit.text.pop();
- }
- }
- }
- return candidates;
- }
-
- // Restore if fallback didn't help either.
- hunk.context.push('\n');
- debug_assert_eq!(hunk.context.len(), old_len);
- } else {
- hunk.context.push('\n');
- }
- }
-
- Vec::new()
-}
-
-/// Given multiple candidate offsets where context matches, use line numbers to disambiguate.
-/// Returns the offset that matches the expected line, or None if no match or no line number available.
-fn disambiguate_by_line_number(
- candidates: &[usize],
- expected_line: Option<u32>,
- offset_to_line: &dyn Fn(usize) -> u32,
-) -> Option<usize> {
- match candidates.len() {
- 0 => None,
- 1 => Some(candidates[0]),
- _ => {
- let expected = expected_line?;
- candidates
- .iter()
- .copied()
- .find(|&offset| offset_to_line(offset) == expected)
- }
- }
-}
-
-pub fn apply_diff_to_string(diff_str: &str, text: &str) -> Result<String> {
- apply_diff_to_string_with_hunk_offset(diff_str, text).map(|(text, _)| text)
-}
-
-/// Applies a diff to a string and returns the result along with the offset where
-/// the first hunk's context matched in the original text. This offset can be used
-/// to adjust cursor positions that are relative to the hunk's content.
-pub fn apply_diff_to_string_with_hunk_offset(
- diff_str: &str,
- text: &str,
-) -> Result<(String, Option<usize>)> {
- let mut diff = DiffParser::new(diff_str);
-
- let mut text = text.to_string();
- let mut first_hunk_offset = None;
-
- while let Some(event) = diff.next().context("Failed to parse diff")? {
- match event {
- DiffEvent::Hunk {
- mut hunk,
- path: _,
- status: _,
- } => {
- let candidates = find_context_candidates(&text, &mut hunk);
-
- let hunk_offset =
- disambiguate_by_line_number(&candidates, hunk.start_line, &|offset| {
- text[..offset].matches('\n').count() as u32
- })
- .ok_or_else(|| anyhow!("couldn't resolve hunk"))?;
-
- if first_hunk_offset.is_none() {
- first_hunk_offset = Some(hunk_offset);
- }
-
- for edit in hunk.edits.iter().rev() {
- let range = (hunk_offset + edit.range.start)..(hunk_offset + edit.range.end);
- text.replace_range(range, &edit.text);
- }
- }
- DiffEvent::FileEnd { .. } => {}
- }
- }
-
- Ok((text, first_hunk_offset))
-}
-
/// Returns the individual edits that would be applied by a diff to the given content.
/// Each edit is a tuple of (byte_range_in_content, replacement_text).
/// Uses sub-line diffing to find the precise character positions of changes.
@@ -441,227 +238,6 @@ pub fn edits_for_diff(content: &str, diff_str: &str) -> Result<Vec<(Range<usize>
Ok(result)
}
-struct PatchFile<'a> {
- old_path: Cow<'a, str>,
- new_path: Cow<'a, str>,
-}
-
-struct DiffParser<'a> {
- current_file: Option<PatchFile<'a>>,
- current_line: Option<(&'a str, DiffLine<'a>)>,
- hunk: Hunk,
- diff: std::str::Lines<'a>,
- pending_start_line: Option<u32>,
- processed_no_newline: bool,
- last_diff_op: LastDiffOp,
-}
-
-#[derive(Clone, Copy, Default)]
-enum LastDiffOp {
- #[default]
- None,
- Context,
- Deletion,
- Addition,
-}
-
-#[derive(Debug, PartialEq)]
-enum DiffEvent<'a> {
- Hunk {
- path: Cow<'a, str>,
- hunk: Hunk,
- status: FileStatus,
- },
- FileEnd {
- renamed_to: Option<Cow<'a, str>>,
- },
-}
-
-#[derive(Debug, Clone, Copy, PartialEq)]
-enum FileStatus {
- Created,
- Modified,
- Deleted,
-}
-
-#[derive(Debug, Default, PartialEq)]
-struct Hunk {
- context: String,
- edits: Vec<Edit>,
- start_line: Option<u32>,
-}
-
-impl Hunk {
- fn is_empty(&self) -> bool {
- self.context.is_empty() && self.edits.is_empty()
- }
-}
-
-#[derive(Debug, PartialEq)]
-struct Edit {
- range: Range<usize>,
- text: String,
-}
-
-impl<'a> DiffParser<'a> {
- fn new(diff: &'a str) -> Self {
- let mut diff = diff.lines();
- let current_line = diff.next().map(|line| (line, DiffLine::parse(line)));
- DiffParser {
- current_file: None,
- hunk: Hunk::default(),
- current_line,
- diff,
- pending_start_line: None,
- processed_no_newline: false,
- last_diff_op: LastDiffOp::None,
- }
- }
-
- fn next(&mut self) -> Result<Option<DiffEvent<'a>>> {
- loop {
- let (hunk_done, file_done) = match self.current_line.as_ref().map(|e| &e.1) {
- Some(DiffLine::OldPath { .. }) | Some(DiffLine::Garbage(_)) | None => (true, true),
- Some(DiffLine::HunkHeader(_)) => (true, false),
- _ => (false, false),
- };
-
- if hunk_done {
- if let Some(file) = &self.current_file
- && !self.hunk.is_empty()
- {
- let status = if file.old_path == "/dev/null" {
- FileStatus::Created
- } else if file.new_path == "/dev/null" {
- FileStatus::Deleted
- } else {
- FileStatus::Modified
- };
- let path = if status == FileStatus::Created {
- file.new_path.clone()
- } else {
- file.old_path.clone()
- };
- let mut hunk = mem::take(&mut self.hunk);
- hunk.start_line = self.pending_start_line.take();
- self.processed_no_newline = false;
- self.last_diff_op = LastDiffOp::None;
- return Ok(Some(DiffEvent::Hunk { path, hunk, status }));
- }
- }
-
- if file_done {
- if let Some(PatchFile { old_path, new_path }) = self.current_file.take() {
- return Ok(Some(DiffEvent::FileEnd {
- renamed_to: if old_path != new_path && old_path != "/dev/null" {
- Some(new_path)
- } else {
- None
- },
- }));
- }
- }
-
- let Some((line, parsed_line)) = self.current_line.take() else {
- break;
- };
-
- util::maybe!({
- match parsed_line {
- DiffLine::OldPath { path } => {
- self.current_file = Some(PatchFile {
- old_path: path,
- new_path: "".into(),
- });
- }
- DiffLine::NewPath { path } => {
- if let Some(current_file) = &mut self.current_file {
- current_file.new_path = path
- }
- }
- DiffLine::HunkHeader(location) => {
- if let Some(loc) = location {
- self.pending_start_line = Some(loc.start_line_old);
- }
- }
- DiffLine::Context(ctx) => {
- if self.current_file.is_some() {
- writeln!(&mut self.hunk.context, "{ctx}")?;
- self.last_diff_op = LastDiffOp::Context;
- }
- }
- DiffLine::Deletion(del) => {
- if self.current_file.is_some() {
- let range = self.hunk.context.len()
- ..self.hunk.context.len() + del.len() + '\n'.len_utf8();
- if let Some(last_edit) = self.hunk.edits.last_mut()
- && last_edit.range.end == range.start
- {
- last_edit.range.end = range.end;
- } else {
- self.hunk.edits.push(Edit {
- range,
- text: String::new(),
- });
- }
- writeln!(&mut self.hunk.context, "{del}")?;
- self.last_diff_op = LastDiffOp::Deletion;
- }
- }
- DiffLine::Addition(add) => {
- if self.current_file.is_some() {
- let range = self.hunk.context.len()..self.hunk.context.len();
- if let Some(last_edit) = self.hunk.edits.last_mut()
- && last_edit.range.end == range.start
- {
- writeln!(&mut last_edit.text, "{add}").unwrap();
- } else {
- self.hunk.edits.push(Edit {
- range,
- text: format!("{add}\n"),
- });
- }
- self.last_diff_op = LastDiffOp::Addition;
- }
- }
- DiffLine::NoNewlineAtEOF => {
- if !self.processed_no_newline {
- self.processed_no_newline = true;
- match self.last_diff_op {
- LastDiffOp::Addition => {
- // Remove trailing newline from the last addition
- if let Some(last_edit) = self.hunk.edits.last_mut() {
- last_edit.text.pop();
- }
- }
- LastDiffOp::Deletion => {
- // Remove trailing newline from context (which includes the deletion)
- self.hunk.context.pop();
- if let Some(last_edit) = self.hunk.edits.last_mut() {
- last_edit.range.end -= 1;
- }
- }
- LastDiffOp::Context | LastDiffOp::None => {
- // Remove trailing newline from context
- self.hunk.context.pop();
- }
- }
- }
- }
- DiffLine::Garbage(_) => {}
- }
-
- anyhow::Ok(())
- })
- .with_context(|| format!("on line:\n\n```\n{}```", line))?;
-
- self.current_line = self.diff.next().map(|line| (line, DiffLine::parse(line)));
- }
-
- anyhow::Ok(None)
- }
-}
-
fn resolve_hunk_edits_in_buffer(
mut hunk: Hunk,
buffer: &TextBufferSnapshot,
@@ -714,144 +290,6 @@ fn resolve_hunk_edits_in_buffer(
Ok(iter)
}
-#[derive(Debug, PartialEq)]
-pub enum DiffLine<'a> {
- OldPath { path: Cow<'a, str> },
- NewPath { path: Cow<'a, str> },
- HunkHeader(Option<HunkLocation>),
- Context(&'a str),
- Deletion(&'a str),
- Addition(&'a str),
- NoNewlineAtEOF,
- Garbage(&'a str),
-}
-
-#[derive(Debug, PartialEq)]
-pub struct HunkLocation {
- pub start_line_old: u32,
- count_old: u32,
- pub start_line_new: u32,
- count_new: u32,
-}
-
-impl<'a> DiffLine<'a> {
- pub fn parse(line: &'a str) -> Self {
- Self::try_parse(line).unwrap_or(Self::Garbage(line))
- }
-
- fn try_parse(line: &'a str) -> Option<Self> {
- if line.starts_with("\\ No newline") {
- return Some(Self::NoNewlineAtEOF);
- }
- if let Some(header) = line.strip_prefix("---").and_then(eat_required_whitespace) {
- let path = parse_header_path("a/", header);
- Some(Self::OldPath { path })
- } else if let Some(header) = line.strip_prefix("+++").and_then(eat_required_whitespace) {
- Some(Self::NewPath {
- path: parse_header_path("b/", header),
- })
- } else if let Some(header) = line.strip_prefix("@@").and_then(eat_required_whitespace) {
- if header.starts_with("...") {
- return Some(Self::HunkHeader(None));
- }
-
- let mut tokens = header.split_whitespace();
- let old_range = tokens.next()?.strip_prefix('-')?;
- let new_range = tokens.next()?.strip_prefix('+')?;
-
- let (start_line_old, count_old) = old_range.split_once(',').unwrap_or((old_range, "1"));
- let (start_line_new, count_new) = new_range.split_once(',').unwrap_or((new_range, "1"));
-
- Some(Self::HunkHeader(Some(HunkLocation {
- start_line_old: start_line_old.parse::<u32>().ok()?.saturating_sub(1),
- count_old: count_old.parse().ok()?,
- start_line_new: start_line_new.parse::<u32>().ok()?.saturating_sub(1),
- count_new: count_new.parse().ok()?,
- })))
- } else if let Some(deleted_header) = line.strip_prefix("-") {
- Some(Self::Deletion(deleted_header))
- } else if line.is_empty() {
- Some(Self::Context(""))
- } else if let Some(context) = line.strip_prefix(" ") {
- Some(Self::Context(context))
- } else {
- Some(Self::Addition(line.strip_prefix("+")?))
- }
- }
-}
-
-impl<'a> Display for DiffLine<'a> {
- fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
- match self {
- DiffLine::OldPath { path } => write!(f, "--- {path}"),
- DiffLine::NewPath { path } => write!(f, "+++ {path}"),
- DiffLine::HunkHeader(Some(hunk_location)) => {
- write!(
- f,
- "@@ -{},{} +{},{} @@",
- hunk_location.start_line_old + 1,
- hunk_location.count_old,
- hunk_location.start_line_new + 1,
- hunk_location.count_new
- )
- }
- DiffLine::HunkHeader(None) => write!(f, "@@ ... @@"),
- DiffLine::Context(content) => write!(f, " {content}"),
- DiffLine::Deletion(content) => write!(f, "-{content}"),
- DiffLine::Addition(content) => write!(f, "+{content}"),
- DiffLine::NoNewlineAtEOF => write!(f, "\\ No newline at end of file"),
- DiffLine::Garbage(line) => write!(f, "{line}"),
- }
- }
-}
-
-fn parse_header_path<'a>(strip_prefix: &'static str, header: &'a str) -> Cow<'a, str> {
- if !header.contains(['"', '\\']) {
- let path = header.split_ascii_whitespace().next().unwrap_or(header);
- return Cow::Borrowed(path.strip_prefix(strip_prefix).unwrap_or(path));
- }
-
- let mut path = String::with_capacity(header.len());
- let mut in_quote = false;
- let mut chars = header.chars().peekable();
- let mut strip_prefix = Some(strip_prefix);
-
- while let Some(char) = chars.next() {
- if char == '"' {
- in_quote = !in_quote;
- } else if char == '\\' {
- let Some(&next_char) = chars.peek() else {
- break;
- };
- chars.next();
- path.push(next_char);
- } else if char.is_ascii_whitespace() && !in_quote {
- break;
- } else {
- path.push(char);
- }
-
- if let Some(prefix) = strip_prefix
- && path == prefix
- {
- strip_prefix.take();
- path.clear();
- }
- }
-
- Cow::Owned(path)
-}
-
-fn eat_required_whitespace(header: &str) -> Option<&str> {
- let trimmed = header.trim_ascii_start();
-
- if trimmed.len() == header.len() {
- None
- } else {
- Some(trimmed)
- }
-}
-
#[cfg(test)]
mod tests {
use super::*;
@@ -863,387 +301,6 @@ mod tests {
use settings::SettingsStore;
use util::path;
- #[test]
- fn parse_lines_simple() {
- let input = indoc! {"
- diff --git a/text.txt b/text.txt
- index 86c770d..a1fd855 100644
- --- a/file.txt
- +++ b/file.txt
- @@ -1,2 +1,3 @@
- context
- -deleted
- +inserted
- garbage
-
- --- b/file.txt
- +++ a/file.txt
- "};
-
- let lines = input.lines().map(DiffLine::parse).collect::<Vec<_>>();
-
- pretty_assertions::assert_eq!(
- lines,
- &[
- DiffLine::Garbage("diff --git a/text.txt b/text.txt"),
- DiffLine::Garbage("index 86c770d..a1fd855 100644"),
- DiffLine::OldPath {
- path: "file.txt".into()
- },
- DiffLine::NewPath {
- path: "file.txt".into()
- },
- DiffLine::HunkHeader(Some(HunkLocation {
- start_line_old: 0,
- count_old: 2,
- start_line_new: 0,
- count_new: 3
- })),
- DiffLine::Context("context"),
- DiffLine::Deletion("deleted"),
- DiffLine::Addition("inserted"),
- DiffLine::Garbage("garbage"),
- DiffLine::Context(""),
- DiffLine::OldPath {
- path: "b/file.txt".into()
- },
- DiffLine::NewPath {
- path: "a/file.txt".into()
- },
- ]
- );
- }
-
- #[test]
- fn file_header_extra_space() {
- let options = ["--- file", "--- file", "---\tfile"];
-
- for option in options {
- pretty_assertions::assert_eq!(
- DiffLine::parse(option),
- DiffLine::OldPath {
- path: "file".into()
- },
- "{option}",
- );
- }
- }
-
- #[test]
- fn hunk_header_extra_space() {
- let options = [
- "@@ -1,2 +1,3 @@",
- "@@ -1,2 +1,3 @@",
- "@@\t-1,2\t+1,3\t@@",
- "@@ -1,2 +1,3 @@",
- "@@ -1,2 +1,3 @@",
- "@@ -1,2 +1,3 @@",
- "@@ -1,2 +1,3 @@ garbage",
- ];
-
- for option in options {
- pretty_assertions::assert_eq!(
- DiffLine::parse(option),
- DiffLine::HunkHeader(Some(HunkLocation {
- start_line_old: 0,
- count_old: 2,
- start_line_new: 0,
- count_new: 3
- })),
- "{option}",
- );
- }
- }
-
- #[test]
- fn hunk_header_without_location() {
- pretty_assertions::assert_eq!(DiffLine::parse("@@ ... @@"), DiffLine::HunkHeader(None));
- }
-
- #[test]
- fn test_parse_path() {
- assert_eq!(parse_header_path("a/", "foo.txt"), "foo.txt");
- assert_eq!(
- parse_header_path("a/", "foo/bar/baz.txt"),
- "foo/bar/baz.txt"
- );
- assert_eq!(parse_header_path("a/", "a/foo.txt"), "foo.txt");
- assert_eq!(
- parse_header_path("a/", "a/foo/bar/baz.txt"),
- "foo/bar/baz.txt"
- );
-
- // Extra
- assert_eq!(
- parse_header_path("a/", "a/foo/bar/baz.txt 2025"),
- "foo/bar/baz.txt"
- );
- assert_eq!(
- parse_header_path("a/", "a/foo/bar/baz.txt\t2025"),
- "foo/bar/baz.txt"
- );
- assert_eq!(
- parse_header_path("a/", "a/foo/bar/baz.txt \""),
- "foo/bar/baz.txt"
- );
-
- // Quoted
- assert_eq!(
- parse_header_path("a/", "a/foo/bar/\"baz quox.txt\""),
- "foo/bar/baz quox.txt"
- );
- assert_eq!(
- parse_header_path("a/", "\"a/foo/bar/baz quox.txt\""),
- "foo/bar/baz quox.txt"
- );
- assert_eq!(
- parse_header_path("a/", "\"foo/bar/baz quox.txt\""),
- "foo/bar/baz quox.txt"
- );
- assert_eq!(parse_header_path("a/", "\"whatever 🤷\""), "whatever 🤷");
- assert_eq!(
- parse_header_path("a/", "\"foo/bar/baz quox.txt\" 2025"),
- "foo/bar/baz quox.txt"
- );
- // unescaped quotes are dropped
- assert_eq!(parse_header_path("a/", "foo/\"bar\""), "foo/bar");
-
- // Escaped
- assert_eq!(
- parse_header_path("a/", "\"foo/\\\"bar\\\"/baz.txt\""),
- "foo/\"bar\"/baz.txt"
- );
- assert_eq!(
- parse_header_path("a/", "\"C:\\\\Projects\\\\My App\\\\old file.txt\""),
- "C:\\Projects\\My App\\old file.txt"
- );
- }
-
- #[test]
- fn test_parse_diff_with_leading_and_trailing_garbage() {
- let diff = indoc! {"
- I need to make some changes.
-
- I'll change the following things:
- - one
- - two
- - three
-
- ```
- --- a/file.txt
- +++ b/file.txt
- one
- +AND
- two
- ```
-
- Summary of what I did:
- - one
- - two
- - three
-
- That's about it.
- "};
-
- let mut events = Vec::new();
- let mut parser = DiffParser::new(diff);
- while let Some(event) = parser.next().unwrap() {
- events.push(event);
- }
-
- assert_eq!(
- events,
- &[
- DiffEvent::Hunk {
- path: "file.txt".into(),
- hunk: Hunk {
- context: "one\ntwo\n".into(),
- edits: vec![Edit {
- range: 4..4,
- text: "AND\n".into()
- }],
- start_line: None,
- },
- status: FileStatus::Modified,
- },
- DiffEvent::FileEnd { renamed_to: None }
- ],
- )
- }
-
- #[test]
- fn test_no_newline_at_eof() {
- let diff = indoc! {"
- --- a/file.py
- +++ b/file.py
- @@ -55,7 +55,3 @@ class CustomDataset(Dataset):
- torch.set_rng_state(state)
- mask = self.transform(mask)
-
- - if self.mode == 'Training':
- - return (img, mask, name)
- - else:
- - return (img, mask, name)
- \\ No newline at end of file
- "};
-
- let mut events = Vec::new();
- let mut parser = DiffParser::new(diff);
- while let Some(event) = parser.next().unwrap() {
- events.push(event);
- }
-
- assert_eq!(
- events,
- &[
- DiffEvent::Hunk {
- path: "file.py".into(),
- hunk: Hunk {
- context: concat!(
- " torch.set_rng_state(state)\n",
- " mask = self.transform(mask)\n",
- "\n",
- " if self.mode == 'Training':\n",
- " return (img, mask, name)\n",
- " else:\n",
- " return (img, mask, name)",
- )
- .into(),
- edits: vec![Edit {
- range: 80..203,
- text: "".into()
- }],
- start_line: Some(54), // @@ -55,7 -> line 54 (0-indexed)
- },
- status: FileStatus::Modified,
- },
- DiffEvent::FileEnd { renamed_to: None }
- ],
- );
- }
-
- #[test]
- fn test_no_newline_at_eof_addition() {
- let diff = indoc! {"
- --- a/file.txt
- +++ b/file.txt
- @@ -1,2 +1,3 @@
- context
- -deleted
- +added line
- \\ No newline at end of file
- "};
-
- let mut events = Vec::new();
- let mut parser = DiffParser::new(diff);
- while let Some(event) = parser.next().unwrap() {
- events.push(event);
- }
-
- assert_eq!(
- events,
- &[
- DiffEvent::Hunk {
- path: "file.txt".into(),
- hunk: Hunk {
- context: "context\ndeleted\n".into(),
- edits: vec![Edit {
- range: 8..16,
- text: "added line".into()
- }],
- start_line: Some(0), // @@ -1,2 -> line 0 (0-indexed)
- },
- status: FileStatus::Modified,
- },
- DiffEvent::FileEnd { renamed_to: None }
- ],
- );
- }
-
- #[test]
- fn test_double_no_newline_at_eof() {
- // Two consecutive "no newline" markers - the second should be ignored
- let diff = indoc! {"
- --- a/file.txt
- +++ b/file.txt
- @@ -1,3 +1,3 @@
- line1
- -old
- +new
- line3
- \\ No newline at end of file
- \\ No newline at end of file
- "};
-
- let mut events = Vec::new();
- let mut parser = DiffParser::new(diff);
- while let Some(event) = parser.next().unwrap() {
- events.push(event);
- }
-
- assert_eq!(
- events,
- &[
- DiffEvent::Hunk {
- path: "file.txt".into(),
- hunk: Hunk {
- context: "line1\nold\nline3".into(), // Only one newline removed
- edits: vec![Edit {
- range: 6..10, // "old\n" is 4 bytes
- text: "new\n".into()
- }],
- start_line: Some(0),
- },
- status: FileStatus::Modified,
- },
- DiffEvent::FileEnd { renamed_to: None }
- ],
- );
- }
-
- #[test]
- fn test_no_newline_after_context_not_addition() {
- // "No newline" after context lines should remove newline from context,
- // not from an earlier addition
- let diff = indoc! {"
- --- a/file.txt
- +++ b/file.txt
- @@ -1,4 +1,4 @@
- line1
- -old
- +new
- line3
- line4
- \\ No newline at end of file
- "};
-
- let mut events = Vec::new();
- let mut parser = DiffParser::new(diff);
- while let Some(event) = parser.next().unwrap() {
- events.push(event);
- }
-
- assert_eq!(
- events,
- &[
- DiffEvent::Hunk {
- path: "file.txt".into(),
- hunk: Hunk {
- // newline removed from line4 (context), not from "new" (addition)
- context: "line1\nold\nline3\nline4".into(),
- edits: vec![Edit {
- range: 6..10, // "old\n" is 4 bytes
- text: "new\n".into() // Still has newline
- }],
- start_line: Some(0),
- },
- status: FileStatus::Modified,
- },
- DiffEvent::FileEnd { renamed_to: None }
- ],
- );
- }
-
#[test]
fn test_line_number_disambiguation() {
// Test that line numbers from hunk headers are used to disambiguate
@@ -24,8 +24,9 @@ use zeta_prompt::{ParsedOutput, ZetaPromptInput};
use std::{env, ops::Range, path::Path, sync::Arc};
use zeta_prompt::{
- CURSOR_MARKER, ZetaFormat, format_zeta_prompt, get_prefill, parse_zeta2_model_output,
- prompt_input_contains_special_tokens, stop_tokens_for_format,
+ ZetaFormat, format_zeta_prompt, get_prefill, parse_zeta2_model_output,
+ parsed_output_from_editable_region, prompt_input_contains_special_tokens,
+ stop_tokens_for_format,
zeta1::{self, EDITABLE_REGION_END_MARKER},
};
@@ -181,6 +182,7 @@ pub fn request_prediction_with_zeta(
let parsed_output = output_text.map(|text| ParsedOutput {
new_editable_region: text,
range_in_excerpt: editable_range_in_excerpt,
+ cursor_offset_in_new_editable_region: None,
});
(request_id, parsed_output, None, None)
@@ -283,10 +285,10 @@ pub fn request_prediction_with_zeta(
let request_id = EditPredictionId(response.request_id.into());
let output_text = Some(response.output).filter(|s| !s.is_empty());
let model_version = response.model_version;
- let parsed_output = ParsedOutput {
- new_editable_region: output_text.unwrap_or_default(),
- range_in_excerpt: response.editable_range,
- };
+ let parsed_output = parsed_output_from_editable_region(
+ response.editable_range,
+ output_text.unwrap_or_default(),
+ );
Some((request_id, Some(parsed_output), model_version, usage))
})
@@ -299,6 +301,7 @@ pub fn request_prediction_with_zeta(
let Some(ParsedOutput {
new_editable_region: mut output_text,
range_in_excerpt: editable_range_in_excerpt,
+ cursor_offset_in_new_editable_region: cursor_offset_in_output,
}) = output
else {
return Ok((Some((request_id, None)), None));
@@ -312,13 +315,6 @@ pub fn request_prediction_with_zeta(
.text_for_range(editable_range_in_buffer.clone())
.collect::<String>();
- // Client-side cursor marker processing (applies to both raw and v3 responses)
- let cursor_offset_in_output = output_text.find(CURSOR_MARKER);
- if let Some(offset) = cursor_offset_in_output {
- log::trace!("Stripping out {CURSOR_MARKER} from response at offset {offset}");
- output_text.replace_range(offset..offset + CURSOR_MARKER.len(), "");
- }
-
if let Some(debug_tx) = &debug_tx {
debug_tx
.unbounded_send(DebugEvent::EditPredictionFinished(
@@ -8,6 +8,9 @@ license = "GPL-3.0-or-later"
[lints]
workspace = true
+[lib]
+path = "src/lib.rs"
+
[[bin]]
name = "ep"
path = "src/main.rs"
@@ -80,9 +83,14 @@ dynamic_prompts = []
ignored = ["wasmtime"]
[dev-dependencies]
+criterion.workspace = true
gpui = { workspace = true, features = ["test-support"] }
indoc.workspace = true
pretty_assertions.workspace = true
project = { workspace = true, features = ["test-support"] }
tempfile.workspace = true
workspace = { workspace = true, features = ["test-support"] }
+
+[[bench]]
+name = "kept_rate"
+harness = false
@@ -0,0 +1,128 @@
+use criterion::{BenchmarkId, Criterion, black_box, criterion_group, criterion_main};
+use edit_prediction_cli::kept_rate::compute_kept_rate;
+
+fn repeated_function_lines(line_count: usize) -> String {
+ let mut text = String::with_capacity(line_count * 32);
+ for index in 0..line_count {
+ text.push_str("fn helper_");
+ text.push_str(&(index % 16).to_string());
+ text.push_str("() { value += old_name + 1; }\n");
+ }
+ text
+}
+
+fn localized_rename_inputs(line_count: usize) -> (String, String, String) {
+ let base = repeated_function_lines(line_count);
+ let mut predicted = base.clone();
+ let mut final_text = base.clone();
+
+ let needle = "value += old_name + 1;";
+ let prediction = "value += very_long_predicted_name + 1;";
+ let accepted = "value += new_name + 1;";
+
+ let offset = base
+ .rfind(needle)
+ .expect("expected needle in synthetic input");
+ let end = offset + needle.len();
+
+ predicted.replace_range(offset..end, prediction);
+ final_text.replace_range(offset..end, accepted);
+
+ (base, predicted, final_text)
+}
+
+fn identical_new_content_inputs(line_count: usize) -> (String, String, String) {
+ let predicted = repeated_function_lines(line_count);
+ (String::new(), predicted.clone(), predicted)
+}
+
+fn repetitive_token_inputs(token_repetitions: usize) -> (String, String, String) {
+ let repeated_old = "foo + foo + foo + foo + foo\n".repeat(token_repetitions);
+ let repeated_predicted = "foo + foo + prediction_token + foo + foo\n".repeat(token_repetitions);
+ let repeated_final = "foo + foo + kept_token + foo + foo\n".repeat(token_repetitions);
+ (repeated_old, repeated_predicted, repeated_final)
+}
+
+fn kept_rate_benchmark(c: &mut Criterion) {
+ let mut no_change_group = c.benchmark_group("kept_rate/no_change");
+ for line_count in [128usize, 512, 2048] {
+ let text = repeated_function_lines(line_count);
+ no_change_group.bench_with_input(
+ BenchmarkId::new("lines", line_count),
+ &text,
+ |bench, text| {
+ bench.iter(|| {
+ black_box(compute_kept_rate(
+ black_box(text),
+ black_box(text),
+ black_box(text),
+ ));
+ });
+ },
+ );
+ }
+ no_change_group.finish();
+
+ let mut localized_group = c.benchmark_group("kept_rate/localized_rename");
+ for line_count in [128usize, 512, 2048] {
+ let inputs = localized_rename_inputs(line_count);
+ localized_group.bench_with_input(
+ BenchmarkId::new("lines", line_count),
+ &inputs,
+ |bench, inputs| {
+ let (base, predicted, final_text) = inputs;
+ bench.iter(|| {
+ black_box(compute_kept_rate(
+ black_box(base),
+ black_box(predicted),
+ black_box(final_text),
+ ));
+ });
+ },
+ );
+ }
+ localized_group.finish();
+
+ let mut addition_group = c.benchmark_group("kept_rate/identical_addition");
+ for line_count in [128usize, 512, 2048] {
+ let inputs = identical_new_content_inputs(line_count);
+ addition_group.bench_with_input(
+ BenchmarkId::new("lines", line_count),
+ &inputs,
+ |bench, inputs| {
+ let (base, predicted, final_text) = inputs;
+ bench.iter(|| {
+ black_box(compute_kept_rate(
+ black_box(base),
+ black_box(predicted),
+ black_box(final_text),
+ ));
+ });
+ },
+ );
+ }
+ addition_group.finish();
+
+ let mut repetitive_group = c.benchmark_group("kept_rate/repetitive_tokens");
+ for token_repetitions in [64usize, 256, 1024] {
+ let inputs = repetitive_token_inputs(token_repetitions);
+ repetitive_group.bench_with_input(
+ BenchmarkId::new("repetitions", token_repetitions),
+ &inputs,
+ |bench, inputs| {
+ let (base, predicted, final_text) = inputs;
+ bench.iter(|| {
+ black_box(compute_kept_rate(
+ black_box(base),
+ black_box(predicted),
+ black_box(final_text),
+ ));
+ });
+ },
+ );
+ }
+ repetitive_group.finish();
+}
+
+criterion_group!(benches, kept_rate_benchmark);
+criterion_main!(benches);
@@ -184,6 +184,8 @@ pub struct ExampleScore {
#[serde(default)]
pub deleted_tokens: usize,
#[serde(default, skip_serializing_if = "Option::is_none")]
+ pub kept_rate: Option<f64>,
+ #[serde(default, skip_serializing_if = "Option::is_none")]
pub cumulative_logprob: Option<f64>,
#[serde(default, skip_serializing_if = "Option::is_none")]
pub avg_logprob: Option<f64>,
@@ -6,11 +6,11 @@ use crate::{
retrieve_context::run_context_retrieval,
};
use anyhow::{Context as _, Result, anyhow};
-use edit_prediction::udiff;
use gpui::AsyncApp;
use similar::DiffableStr;
use std::ops::Range;
use std::sync::Arc;
+use zeta_prompt::udiff;
use zeta_prompt::{
ZetaFormat, encode_patch_as_output_for_format, excerpt_range_for_format, format_zeta_prompt,
multi_region, output_end_marker_for_format, resolve_cursor_region,
@@ -1,4 +1,4 @@
-use client::{Client, ProxySettings, UserStore};
+use client::{Client, ProxySettings, RefreshLlmTokenListener, UserStore};
use db::AppDatabase;
use extension::ExtensionHostProxy;
use fs::RealFs;
@@ -109,7 +109,8 @@ pub fn init(cx: &mut App) -> EpAppState {
debug_adapter_extension::init(extension_host_proxy.clone(), cx);
language_extension::init(LspAccess::Noop, extension_host_proxy, languages.clone());
- language_model::init(user_store.clone(), client.clone(), cx);
+ language_model::init(cx);
+ RefreshLlmTokenListener::register(client.clone(), user_store.clone(), cx);
language_models::init(user_store.clone(), client.clone(), cx);
languages::init(languages.clone(), fs.clone(), node_runtime.clone(), cx);
prompt_store::init(cx);
@@ -0,0 +1,427 @@
+use crate::word_diff::tokenize;
+
+#[cfg(test)]
+#[derive(Debug, Clone, Copy, PartialEq, Eq)]
+pub enum TokenAnnotation {
+ Context,
+ Kept,
+ Discarded,
+}
+
+#[allow(dead_code)]
+#[derive(Debug, Clone)]
+pub struct KeptRateResult {
+ pub predicted_new_chars: usize,
+ pub final_new_chars: usize,
+ pub kept_chars: usize,
+ pub discarded_chars: usize,
+ pub context_chars: usize,
+ pub kept_rate: f64,
+ #[cfg(test)]
+ pub token_annotations: Vec<TokenAnnotation>,
+}
+
+fn dp_index(width: usize, row: usize, column: usize) -> usize {
+ row * width + column
+}
+
+/// Return masks over `a` and `b` using one-sided LCS tie-breaking for each
+/// side while sharing a single DP table construction.
+fn lcs_keep_masks(a: &[&str], b: &[&str]) -> (Vec<bool>, Vec<bool>) {
+ if a.is_empty() || b.is_empty() {
+ return (vec![false; a.len()], vec![false; b.len()]);
+ }
+
+ if a == b {
+ return (vec![true; a.len()], vec![true; b.len()]);
+ }
+
+ let mut keep_a = vec![false; a.len()];
+ let mut keep_b = vec![false; b.len()];
+
+ let prefix_len = a
+ .iter()
+ .zip(b.iter())
+ .take_while(|(left, right)| left == right)
+ .count();
+ let suffix_len = {
+ let max_suffix = (a.len() - prefix_len).min(b.len() - prefix_len);
+ let mut suffix_len = 0;
+
+ while suffix_len < max_suffix {
+ let a_index = a.len() - 1 - suffix_len;
+ let b_index = b.len() - 1 - suffix_len;
+ if a[a_index] != b[b_index] {
+ break;
+ }
+ suffix_len += 1;
+ }
+
+ suffix_len
+ };
+
+ for index in 0..prefix_len {
+ keep_a[index] = true;
+ keep_b[index] = true;
+ }
+
+ for offset in 0..suffix_len {
+ let a_index = a.len() - suffix_len + offset;
+ let b_index = b.len() - suffix_len + offset;
+ keep_a[a_index] = true;
+ keep_b[b_index] = true;
+ }
+
+ let a_mid = &a[prefix_len..a.len() - suffix_len];
+ let b_mid = &b[prefix_len..b.len() - suffix_len];
+
+ if a_mid.is_empty() || b_mid.is_empty() {
+ return (keep_a, keep_b);
+ }
+
+ let row_count = a_mid.len() + 1;
+ let column_count = b_mid.len() + 1;
+ let mut dp = vec![0u32; row_count * column_count];
+
+ for i in 1..row_count {
+ let token_a = a_mid[i - 1];
+ for j in 1..column_count {
+ let index = dp_index(column_count, i, j);
+ if token_a == b_mid[j - 1] {
+ dp[index] = dp[dp_index(column_count, i - 1, j - 1)] + 1;
+ } else {
+ let up = dp[dp_index(column_count, i - 1, j)];
+ let left = dp[dp_index(column_count, i, j - 1)];
+ dp[index] = up.max(left);
+ }
+ }
+ }
+
+ let mut i = a_mid.len();
+ let mut j = b_mid.len();
+
+ while i > 0 && j > 0 {
+ if a_mid[i - 1] == b_mid[j - 1] {
+ keep_a[prefix_len + i - 1] = true;
+ i -= 1;
+ j -= 1;
+ } else {
+ let up = dp[dp_index(column_count, i - 1, j)];
+ let left = dp[dp_index(column_count, i, j - 1)];
+ if up >= left {
+ i -= 1;
+ } else {
+ j -= 1;
+ }
+ }
+ }
+
+ let mut i = a_mid.len();
+ let mut j = b_mid.len();
+
+ while i > 0 && j > 0 {
+ if a_mid[i - 1] == b_mid[j - 1] {
+ keep_b[prefix_len + j - 1] = true;
+ i -= 1;
+ j -= 1;
+ } else {
+ let up = dp[dp_index(column_count, i - 1, j)];
+ let left = dp[dp_index(column_count, i, j - 1)];
+ if left >= up {
+ j -= 1;
+ } else {
+ i -= 1;
+ }
+ }
+ }
+
+ (keep_a, keep_b)
+}
+
+fn analyze_masked_tokens<'a>(tokens: &[&'a str], mask: &[bool]) -> (Vec<&'a str>, usize, usize) {
+ let mut unmasked_tokens = Vec::with_capacity(tokens.len());
+ let mut unmasked_chars = 0;
+ let mut masked_chars = 0;
+
+ for (&token, &is_masked) in tokens.iter().zip(mask.iter()) {
+ if is_masked {
+ masked_chars += token.len();
+ } else {
+ unmasked_tokens.push(token);
+ unmasked_chars += token.len();
+ }
+ }
+
+ (unmasked_tokens, unmasked_chars, masked_chars)
+}
+
+pub fn compute_kept_rate(base: &str, predicted: &str, final_text: &str) -> KeptRateResult {
+ if base == predicted && predicted == final_text {
+ let predicted_tokens = tokenize(predicted);
+ let context_chars = predicted_tokens.iter().map(|token| token.len()).sum();
+ return KeptRateResult {
+ predicted_new_chars: 0,
+ final_new_chars: 0,
+ kept_chars: 0,
+ discarded_chars: 0,
+ context_chars,
+ kept_rate: 1.0,
+ #[cfg(test)]
+ token_annotations: vec![TokenAnnotation::Context; predicted_tokens.len()],
+ };
+ }
+
+ let base_tokens = tokenize(base);
+ let predicted_tokens = tokenize(predicted);
+ let final_tokens = tokenize(final_text);
+
+ let (pred_base_mask, _) = lcs_keep_masks(&predicted_tokens, &base_tokens);
+ let (pred_final_mask, final_pred_mask) = lcs_keep_masks(&predicted_tokens, &final_tokens);
+ let context_mask: Vec<bool> = pred_base_mask
+ .iter()
+ .zip(pred_final_mask.iter())
+ .map(|(&in_base, &in_final)| in_base && in_final)
+ .collect();
+
+ let (stripped_predicted, predicted_new_chars, context_chars) =
+ analyze_masked_tokens(&predicted_tokens, &context_mask);
+
+ let (final_base_mask, _) = lcs_keep_masks(&final_tokens, &base_tokens);
+ let final_context_mask: Vec<bool> = final_base_mask
+ .iter()
+ .zip(final_pred_mask.iter())
+ .map(|(&in_base, &in_predicted)| in_base && in_predicted)
+ .collect();
+
+ let (stripped_final, final_new_chars, _) =
+ analyze_masked_tokens(&final_tokens, &final_context_mask);
+
+ let keep_mask = lcs_keep_masks(&stripped_predicted, &stripped_final).0;
+
+ let kept_chars: usize = stripped_predicted
+ .iter()
+ .zip(keep_mask.iter())
+ .filter_map(|(&token, &is_kept)| is_kept.then_some(token.len()))
+ .sum();
+
+ let discarded_chars = predicted_new_chars - kept_chars;
+
+ let kept_rate = if predicted_new_chars == 0 {
+ if final_new_chars == 0 { 1.0 } else { 0.0 }
+ } else {
+ kept_chars as f64 / predicted_new_chars as f64
+ };
+
+ #[cfg(test)]
+ let token_annotations = {
+ let mut token_annotations = Vec::with_capacity(predicted_tokens.len());
+ let mut new_index = 0;
+ for (token_index, _token) in predicted_tokens.iter().enumerate() {
+ if context_mask[token_index] {
+ token_annotations.push(TokenAnnotation::Context);
+ } else {
+ let annotation = if keep_mask[new_index] {
+ TokenAnnotation::Kept
+ } else {
+ TokenAnnotation::Discarded
+ };
+ #[cfg(test)]
+ token_annotations.push(annotation);
+ new_index += 1;
+ }
+ }
+ token_annotations
+ };
+
+ KeptRateResult {
+ predicted_new_chars,
+ final_new_chars,
+ kept_chars,
+ discarded_chars,
+ context_chars,
+ kept_rate,
+ #[cfg(test)]
+ token_annotations,
+ }
+}
+
+#[cfg(test)]
+mod test_kept_rate {
+ use super::*;
+
+ #[test]
+ fn test_lcs_keep_masks() {
+ let (a_mask, b_mask) = lcs_keep_masks(&["a", "b", "c", "d", "e"], &["a", "c", "e"]);
+ assert_eq!(a_mask, vec![true, false, true, false, true]);
+ assert_eq!(b_mask, vec![true, true, true]);
+
+ let (a_mask, b_mask) = lcs_keep_masks(&[], &["x"]);
+ assert!(a_mask.is_empty());
+ assert_eq!(b_mask, vec![false]);
+ }
+
+ #[test]
+ fn test_lcs_keep_masks_matches_historical_one_sided_masks() {
+ let a = ["x", "a", "x", "b"];
+ let b = ["a", "x", "b", "x"];
+ let (a_mask, b_mask) = lcs_keep_masks(&a, &b);
+ assert_eq!(a_mask, lcs_keep_masks(&a, &b).0);
+ assert_eq!(b_mask, lcs_keep_masks(&b, &a).0);
+ }
+
+ #[test]
+ fn test_rate_extremes() {
+ let no_change = compute_kept_rate("foo bar", "foo bar", "foo bar");
+ assert!((no_change.kept_rate - 1.0).abs() < 1e-6);
+ assert_eq!(no_change.predicted_new_chars, 0);
+ assert!(
+ no_change
+ .token_annotations
+ .iter()
+ .all(|&annotation| annotation == TokenAnnotation::Context)
+ );
+
+ let accepted = compute_kept_rate("old", "new", "new");
+ assert!((accepted.kept_rate - 1.0).abs() < 1e-6);
+
+ let discarded = compute_kept_rate("old", "old", "new");
+ assert!((discarded.kept_rate - 0.0).abs() < 1e-6);
+ }
+
+ #[test]
+ fn test_pure_addition() {
+ let kept = compute_kept_rate("", "brand new line\n", "brand new line\n");
+ assert_eq!(kept.kept_chars, kept.predicted_new_chars);
+ assert!(
+ kept.token_annotations
+ .iter()
+ .all(|&annotation| annotation == TokenAnnotation::Kept)
+ );
+
+ let discarded =
+ compute_kept_rate("", "brand new line\n", "something completely different\n");
+ assert!(discarded.kept_chars < discarded.predicted_new_chars);
+ }
+
+ #[test]
+ fn test_decoy_when_base_excluded() {
+ let base = " decoy.when(mock_sync_hardware_api.sp()).then_return(SpeedStatus.IDLE)\n";
+ let predicted = " decoy.when(mock_sync_module_hardware.speed_status).then_return(SpeedStatus.IDLE)\n";
+ let final_text = " decoy.when(mock_sync_module_hardware.speed_status).then_return(SpeedStatus.IDLE)\n";
+ let result = compute_kept_rate(base, predicted, final_text);
+ let expected_new = "mock_sync_module_hardware".len() + "speed_status".len();
+ assert_eq!(result.predicted_new_chars, expected_new);
+ assert!((result.kept_rate - 1.0).abs() < 1e-6);
+ }
+
+ #[test]
+ fn test_missing_deletion() {
+ let base = " fn select_next_edit(&mut self, _: &NextEdit, _: &mut Window, cx: &mut Context<Self>) {\n epr\n";
+ let predicted = " fn select_next_edit(&mut self, _: &NextEdit, _: &mut Window, cx: &mut Context<Self>) {\n epr\neprintln!(\"\");\n";
+ let final_text = " fn select_next_edit(&mut self, _: &NextEdit, _: &mut Window, cx: &mut Context<Self>) {\n eprintln!(\"\");\n";
+ let result = compute_kept_rate(base, predicted, final_text);
+ assert!(
+ result.kept_rate < 0.85,
+ "expected kept_rate < 0.85, got {}",
+ result.kept_rate
+ );
+ assert!(result.discarded_chars > 0);
+ }
+
+ #[test]
+ fn test_empty_prediction() {
+ let result = compute_kept_rate("old line\n", "", "new line\n");
+ assert!((result.kept_rate - 0.0).abs() < 1e-6);
+ }
+
+ #[test]
+ fn test_partial_kept() {
+ let result = compute_kept_rate("old\n", "alpha\nbeta\ngamma\n", "alpha\ngamma\n");
+ assert!(result.kept_chars > 0);
+ assert!(result.discarded_chars > 0);
+ assert!(result.kept_rate > 0.0 && result.kept_rate < 1.0);
+ }
+
+ #[test]
+ fn test_eprintln_token_alignment() {
+ let base = " fn select_next_edit(&mut self, _: &NextEdit, _: &mut Window, cx: &mut Context<Self>) {\n epr\n";
+ let predicted = " fn select_next_edit(&mut self, _: &NextEdit, _: &mut Window, cx: &mut Context<Self>) {\n eprintln!(\"hello world!\");\n";
+ let final_text = " fn select_next_edit(&mut self, _: &NextEdit, _: &mut Window, cx: &mut Context<Self>) {\n eprintln!(\"\");\n";
+ let result = compute_kept_rate(base, predicted, final_text);
+ assert!(result.discarded_chars > 0);
+ assert!(result.kept_chars > 0);
+ assert!(result.kept_rate > 0.0 && result.kept_rate < 1.0);
+ assert_eq!(result.kept_chars, 14);
+ assert_eq!(result.discarded_chars, 12);
+ }
+
+ #[test]
+ fn test_annotations_rename() {
+ let base = " foo(old_name)\n";
+ let predicted = " foo(new_name)\n";
+ let final_text = " foo(new_name)\n";
+ let result = compute_kept_rate(base, predicted, final_text);
+
+ assert_eq!(result.predicted_new_chars, "new_name".len());
+ assert_eq!(result.token_annotations.len(), tokenize(predicted).len());
+
+ for (&token, &annotation) in tokenize(predicted).iter().zip(&result.token_annotations) {
+ if token == "new_name" {
+ assert_eq!(annotation, TokenAnnotation::Kept);
+ } else {
+ assert_eq!(annotation, TokenAnnotation::Context);
+ }
+ }
+ }
+
+ #[test]
+ fn test_annotations_eprintln_coloring() {
+ let base = " fn select_next_edit(&mut self, _: &NextEdit, _: &mut Window, cx: &mut Context<Self>) {\n epr\n";
+ let predicted = " fn select_next_edit(&mut self, _: &NextEdit, _: &mut Window, cx: &mut Context<Self>) {\n eprintln!(\"hello world!\");\n";
+ let final_text = " fn select_next_edit(&mut self, _: &NextEdit, _: &mut Window, cx: &mut Context<Self>) {\n eprintln!(\"\");\n";
+ let result = compute_kept_rate(base, predicted, final_text);
+ let predicted_tokens = tokenize(predicted);
+
+ let eprintln_index = predicted_tokens
+ .iter()
+ .position(|&token| token == "eprintln")
+ .expect("eprintln token not found");
+
+ for annotation in &result.token_annotations[..eprintln_index] {
+ assert_eq!(*annotation, TokenAnnotation::Context);
+ }
+
+ assert_eq!(
+ &result.token_annotations[eprintln_index..=eprintln_index + 10],
+ &[
+ TokenAnnotation::Kept,
+ TokenAnnotation::Kept,
+ TokenAnnotation::Kept,
+ TokenAnnotation::Kept,
+ TokenAnnotation::Discarded,
+ TokenAnnotation::Discarded,
+ TokenAnnotation::Discarded,
+ TokenAnnotation::Discarded,
+ TokenAnnotation::Kept,
+ TokenAnnotation::Kept,
+ TokenAnnotation::Kept,
+ ]
+ );
+ assert_eq!(
+ result.token_annotations.last(),
+ Some(&TokenAnnotation::Context)
+ );
+ }
+
+ #[test]
+ fn test_repetitive_tokens_remain_discarded() {
+ let base = "foo + foo + foo + foo + foo\n".repeat(16);
+ let predicted = "foo + foo + prediction_token + foo + foo\n".repeat(16);
+ let final_text = "foo + foo + kept_token + foo + foo\n".repeat(16);
+ let result = compute_kept_rate(&base, &predicted, &final_text);
+
+ assert_eq!(result.kept_chars, 0);
+ assert_eq!(result.discarded_chars, result.predicted_new_chars);
+ assert_eq!(result.predicted_new_chars, "prediction_token".len() * 16);
+ }
+}
@@ -0,0 +1,4 @@
+#[allow(dead_code)]
+mod word_diff;
+
+pub mod kept_rate;
@@ -5,6 +5,7 @@ mod filter_languages;
mod format_prompt;
mod git;
mod headless;
+mod kept_rate;
mod load_project;
mod metrics;
mod openai_client;
@@ -1297,3 +1297,5 @@ index abc123..def456 100644
);
}
}
+
+pub use crate::kept_rate::compute_kept_rate;
@@ -5,8 +5,7 @@ use crate::{
repair,
};
use anyhow::{Context as _, Result};
-use edit_prediction::example_spec::encode_cursor_in_patch;
-use zeta_prompt::{CURSOR_MARKER, ZetaFormat, parse_zeta2_model_output};
+use zeta_prompt::{ZetaFormat, parse_zeta2_model_output, parsed_output_to_patch};
pub fn run_parse_output(example: &mut Example) -> Result<()> {
example
@@ -65,46 +64,18 @@ fn parse_zeta2_output(
.context("prompt_inputs required")?;
let parsed = parse_zeta2_model_output(actual_output, format, prompt_inputs)?;
- let range_in_excerpt = parsed.range_in_excerpt;
-
+ let range_in_excerpt = parsed.range_in_excerpt.clone();
let excerpt = prompt_inputs.cursor_excerpt.as_ref();
- let old_text = excerpt[range_in_excerpt.clone()].to_string();
- let mut new_text = parsed.new_editable_region;
-
- let cursor_offset = if let Some(offset) = new_text.find(CURSOR_MARKER) {
- new_text.replace_range(offset..offset + CURSOR_MARKER.len(), "");
- Some(offset)
- } else {
- None
- };
+ let editable_region_offset = range_in_excerpt.start;
+ let editable_region_start_line = excerpt[..editable_region_offset].matches('\n').count();
- // Normalize trailing newlines for diff generation
- let mut old_text_normalized = old_text;
+ let mut new_text = parsed.new_editable_region.clone();
if !new_text.is_empty() && !new_text.ends_with('\n') {
new_text.push('\n');
}
- if !old_text_normalized.is_empty() && !old_text_normalized.ends_with('\n') {
- old_text_normalized.push('\n');
- }
-
- let editable_region_offset = range_in_excerpt.start;
- let editable_region_start_line = excerpt[..editable_region_offset].matches('\n').count();
- let editable_region_lines = old_text_normalized.lines().count() as u32;
-
- let diff = language::unified_diff_with_context(
- &old_text_normalized,
- &new_text,
- editable_region_start_line as u32,
- editable_region_start_line as u32,
- editable_region_lines,
- );
-
- let formatted_diff = format!(
- "--- a/{path}\n+++ b/{path}\n{diff}",
- path = example.spec.cursor_path.to_string_lossy(),
- );
- let formatted_diff = encode_cursor_in_patch(&formatted_diff, cursor_offset);
+ let cursor_offset = parsed.cursor_offset_in_new_editable_region;
+ let formatted_diff = parsed_output_to_patch(prompt_inputs, parsed)?;
let actual_cursor = cursor_offset.map(|editable_region_cursor_offset| {
ActualCursor::from_editable_region(
@@ -2,8 +2,8 @@ use std::ops::Range;
use std::path::Path;
use std::sync::Arc;
-use edit_prediction::udiff::apply_diff_to_string;
use language::{char_diff, text_diff};
+use zeta_prompt::udiff::apply_diff_to_string;
use zeta_prompt::ZetaPromptInput;
@@ -653,9 +653,9 @@ pub fn compute_prediction_reversal_ratio(
#[cfg(test)]
mod tests {
use super::*;
- use edit_prediction::udiff::apply_diff_to_string;
use indoc::indoc;
use zeta_prompt::ExcerptRanges;
+ use zeta_prompt::udiff::apply_diff_to_string;
fn make_test_prompt_inputs(
content: &str,
@@ -10,13 +10,13 @@ use crate::{
reversal_tracking,
};
use anyhow::Context as _;
-use edit_prediction::udiff::{apply_diff_to_string, apply_diff_to_string_with_hunk_offset};
use gpui::AsyncApp;
use serde::Serialize;
use std::fs::File;
use std::io::BufWriter;
use std::path::Path;
use std::sync::Arc;
+use zeta_prompt::udiff::{apply_diff_to_string, apply_diff_to_string_with_hunk_offset};
pub async fn run_scoring(
example: &mut Example,
@@ -84,6 +84,7 @@ pub async fn run_scoring(
has_isolated_whitespace_changes: false,
inserted_tokens: 0,
deleted_tokens: 0,
+ kept_rate: None,
cumulative_logprob: None,
avg_logprob: None,
};
@@ -120,12 +121,14 @@ pub async fn run_scoring(
let mut best_delta_chr_f_metrics = metrics::DeltaChrFMetrics::default();
let mut best_expected_cursor: Option<usize> = None;
let mut best_patch_idx: Option<usize> = None;
+ let mut best_expected_text: Option<&str> = None;
for (idx, expected) in expected_texts.iter().enumerate() {
let delta_chr_f_metrics = metrics::delta_chr_f(original_text, expected, &actual_text);
if delta_chr_f_metrics.score > best_delta_chr_f_metrics.score {
best_delta_chr_f_metrics = delta_chr_f_metrics;
best_patch_idx = Some(idx);
+ best_expected_text = Some(expected);
}
}
@@ -184,6 +187,10 @@ pub async fn run_scoring(
prediction.actual_cursor.as_ref(),
);
+ let kept_rate = best_expected_text.map(|final_text| {
+ metrics::compute_kept_rate(original_text, &actual_text, final_text).kept_rate
+ });
+
scores.push(ExampleScore {
delta_chr_f: best_delta_chr_f_metrics.score as f32,
delta_chr_f_true_positives: best_delta_chr_f_metrics.counts.true_positives,
@@ -203,6 +210,7 @@ pub async fn run_scoring(
has_isolated_whitespace_changes,
inserted_tokens: token_changes.inserted_tokens,
deleted_tokens: token_changes.deleted_tokens,
+ kept_rate,
cumulative_logprob: prediction.cumulative_logprob,
avg_logprob: prediction.avg_logprob,
});
@@ -267,6 +275,8 @@ pub fn print_report(examples: &[Example], verbose: bool) {
let mut wrong_editable_region_count: usize = 0;
let mut wrong_editable_region_total: usize = 0;
let mut isolated_whitespace_count: usize = 0;
+ let mut kept_rate_sum: f64 = 0.0;
+ let mut kept_rate_count: usize = 0;
let mut patch_inserted_tokens: Vec<usize> = Vec::new();
let mut patch_deleted_tokens: Vec<usize> = Vec::new();
let mut predictions_with_patch: usize = 0;
@@ -359,6 +369,12 @@ pub fn print_report(examples: &[Example], verbose: bool) {
isolated_whitespace_count += 1;
}
+ // Accumulate kept rate metrics
+ if let Some(kr) = score.kept_rate {
+ kept_rate_sum += kr;
+ kept_rate_count += 1;
+ }
+
// Accumulate token change metrics (only for predictions that produced a patch)
let has_patch = example
.predictions
@@ -488,6 +504,16 @@ pub fn print_report(examples: &[Example], verbose: bool) {
println!("Isolated whitespace changes: {}", isolated_ws_str);
}
+ // Print kept rate metrics
+ if kept_rate_count > 0 {
+ let avg_kept_rate = kept_rate_sum / kept_rate_count as f64;
+ println!(
+ "Kept rate: {:.1}% avg ({} evaluated)",
+ avg_kept_rate * 100.0,
+ kept_rate_count
+ );
+ }
+
// Print token change percentile summary (only for predictions with a patch)
if !patch_inserted_tokens.is_empty() {
patch_inserted_tokens.sort_unstable();
@@ -590,6 +616,8 @@ pub struct SummaryJson {
#[serde(skip_serializing_if = "Option::is_none")]
pub wrong_editable_region_rate: Option<f32>,
pub isolated_whitespace_rate: Option<f32>,
+ #[serde(skip_serializing_if = "Option::is_none")]
+ pub avg_kept_rate: Option<f64>,
}
pub fn compute_summary(examples: &[Example]) -> SummaryJson {
@@ -615,6 +643,8 @@ pub fn compute_summary(examples: &[Example]) -> SummaryJson {
let mut wrong_editable_region_count: usize = 0;
let mut wrong_editable_region_total: usize = 0;
let mut isolated_whitespace_count: usize = 0;
+ let mut kept_rate_sum: f64 = 0.0;
+ let mut kept_rate_count: usize = 0;
for example in examples {
for (score_idx, score) in example.score.iter().enumerate() {
@@ -655,6 +685,12 @@ pub fn compute_summary(examples: &[Example]) -> SummaryJson {
isolated_whitespace_count += 1;
}
+ // Accumulate kept rate metrics
+ if let Some(kr) = score.kept_rate {
+ kept_rate_sum += kr;
+ kept_rate_count += 1;
+ }
+
// Accumulate cursor metrics
if let Some(exact_match) = score.cursor_exact_match {
cursor_total += 1;
@@ -729,6 +765,12 @@ pub fn compute_summary(examples: &[Example]) -> SummaryJson {
None
};
+ let avg_kept_rate = if kept_rate_count > 0 {
+ Some(kept_rate_sum / kept_rate_count as f64)
+ } else {
+ None
+ };
+
SummaryJson {
total_examples: total_scores,
avg_delta_chr_f,
@@ -761,6 +803,7 @@ pub fn compute_summary(examples: &[Example]) -> SummaryJson {
cursor_total_evaluated,
wrong_editable_region_rate,
isolated_whitespace_rate,
+ avg_kept_rate,
}
}
@@ -201,10 +201,14 @@ impl EditPredictionContextView {
multibuffer.clear(cx);
for (path, buffer, ranges, orders, _) in paths {
- let (anchor_ranges, _) =
- multibuffer.set_excerpts_for_path(path, buffer, ranges, 0, cx);
- for (anchor_range, order) in anchor_ranges.into_iter().zip(orders) {
- excerpt_anchors_with_orders.push((anchor_range.start, order));
+ multibuffer.set_excerpts_for_path(path, buffer.clone(), ranges.clone(), 0, cx);
+ let snapshot = multibuffer.snapshot(cx);
+ let buffer_snapshot = buffer.read(cx).snapshot();
+ for (range, order) in ranges.into_iter().zip(orders) {
+ let text_anchor = buffer_snapshot.anchor_range_inside(range);
+ if let Some(start) = snapshot.anchor_in_buffer(text_anchor.start) {
+ excerpt_anchors_with_orders.push((start, order));
+ }
}
}
});
@@ -357,35 +357,26 @@ impl RatePredictionsModal {
});
editor.disable_header_for_buffer(new_buffer_id, cx);
- let excerpt_id = editor.buffer().update(cx, |multibuffer, cx| {
+ editor.buffer().update(cx, |multibuffer, cx| {
multibuffer.clear(cx);
- multibuffer.set_excerpts_for_buffer(new_buffer, [start..end], 0, cx);
+ multibuffer.set_excerpts_for_buffer(new_buffer.clone(), [start..end], 0, cx);
multibuffer.add_diff(diff, cx);
- multibuffer.excerpt_ids().into_iter().next()
});
- if let Some((excerpt_id, cursor_position)) =
- excerpt_id.zip(prediction.cursor_position.as_ref())
- {
+ if let Some(cursor_position) = prediction.cursor_position.as_ref() {
let multibuffer_snapshot = editor.buffer().read(cx).snapshot(cx);
- if let Some(buffer_snapshot) =
- multibuffer_snapshot.buffer_for_excerpt(excerpt_id)
- {
- let cursor_offset = prediction
- .edit_preview
- .anchor_to_offset_in_result(cursor_position.anchor)
- + cursor_position.offset;
- let cursor_anchor = buffer_snapshot.anchor_after(cursor_offset);
-
- if let Some(anchor) =
- multibuffer_snapshot.anchor_in_excerpt(excerpt_id, cursor_anchor)
- {
- editor.splice_inlays(
- &[InlayId::EditPrediction(0)],
- vec![Inlay::edit_prediction(0, anchor, "▏")],
- cx,
- );
- }
+ let cursor_offset = prediction
+ .edit_preview
+ .anchor_to_offset_in_result(cursor_position.anchor)
+ + cursor_position.offset;
+ let cursor_anchor = new_buffer.read(cx).snapshot().anchor_after(cursor_offset);
+
+ if let Some(anchor) = multibuffer_snapshot.anchor_in_excerpt(cursor_anchor) {
+ editor.splice_inlays(
+ &[InlayId::EditPrediction(0)],
+ vec![Inlay::edit_prediction(0, anchor, "▏")],
+ cx,
+ );
}
}
});
@@ -991,7 +982,6 @@ impl FeedbackCompletionProvider {
impl editor::CompletionProvider for FeedbackCompletionProvider {
fn completions(
&self,
- _excerpt_id: editor::ExcerptId,
buffer: &Entity<Buffer>,
buffer_position: language::Anchor,
_trigger: editor::CompletionContext,
@@ -7,9 +7,9 @@ use std::ops::Range;
use crate::{Editor, HighlightKey};
use collections::{HashMap, HashSet};
use gpui::{AppContext as _, Context, HighlightStyle};
-use itertools::Itertools;
use language::{BufferRow, BufferSnapshot, language_settings::LanguageSettings};
-use multi_buffer::{Anchor, ExcerptId};
+use multi_buffer::{Anchor, BufferOffset, ExcerptRange, MultiBufferSnapshot};
+use text::OffsetRangeExt as _;
use ui::{ActiveTheme, utils::ensure_minimum_contrast};
impl Editor {
@@ -25,55 +25,49 @@ impl Editor {
let accents_count = cx.theme().accents().0.len();
let multi_buffer_snapshot = self.buffer().read(cx).snapshot(cx);
- let visible_excerpts = self.visible_excerpts(false, cx);
- let excerpt_data: Vec<(ExcerptId, BufferSnapshot, Range<usize>)> = visible_excerpts
+ let visible_excerpts = self.visible_buffer_ranges(cx);
+ let excerpt_data: Vec<(
+ BufferSnapshot,
+ Range<BufferOffset>,
+ ExcerptRange<text::Anchor>,
+ )> = visible_excerpts
.into_iter()
- .filter_map(|(excerpt_id, (buffer, _, buffer_range))| {
- let buffer = buffer.read(cx);
- let buffer_snapshot = buffer.snapshot();
- if LanguageSettings::for_buffer(&buffer, cx).colorize_brackets {
- Some((excerpt_id, buffer_snapshot, buffer_range))
- } else {
- None
- }
+ .filter(|(buffer_snapshot, _, _)| {
+ let Some(buffer) = self.buffer().read(cx).buffer(buffer_snapshot.remote_id())
+ else {
+ return false;
+ };
+ LanguageSettings::for_buffer(buffer.read(cx), cx).colorize_brackets
})
.collect();
let mut fetched_tree_sitter_chunks = excerpt_data
.iter()
- .filter_map(|(excerpt_id, ..)| {
+ .filter_map(|(_, _, excerpt_range)| {
+ let key = excerpt_range.context.clone();
Some((
- *excerpt_id,
- self.bracket_fetched_tree_sitter_chunks
- .get(excerpt_id)
- .cloned()?,
+ key.clone(),
+ self.bracket_fetched_tree_sitter_chunks.get(&key).cloned()?,
))
})
- .collect::<HashMap<ExcerptId, HashSet<Range<BufferRow>>>>();
+ .collect::<HashMap<Range<text::Anchor>, HashSet<Range<BufferRow>>>>();
let bracket_matches_by_accent = cx.background_spawn(async move {
- let anchors_in_multi_buffer = |current_excerpt: ExcerptId,
- text_anchors: [text::Anchor; 4]|
- -> Option<[Option<_>; 4]> {
- multi_buffer_snapshot
- .anchors_in_excerpt(current_excerpt, text_anchors)?
- .collect_array()
- };
-
let bracket_matches_by_accent: HashMap<usize, Vec<Range<Anchor>>> =
excerpt_data.into_iter().fold(
HashMap::default(),
- |mut acc, (excerpt_id, buffer_snapshot, buffer_range)| {
- let fetched_chunks =
- fetched_tree_sitter_chunks.entry(excerpt_id).or_default();
+ |mut acc, (buffer_snapshot, buffer_range, excerpt_range)| {
+ let fetched_chunks = fetched_tree_sitter_chunks
+ .entry(excerpt_range.context.clone())
+ .or_default();
let brackets_by_accent = compute_bracket_ranges(
+ &multi_buffer_snapshot,
&buffer_snapshot,
buffer_range,
+ excerpt_range,
fetched_chunks,
- excerpt_id,
accents_count,
- &anchors_in_multi_buffer,
);
for (accent_number, new_ranges) in brackets_by_accent {
@@ -144,15 +138,20 @@ impl Editor {
}
fn compute_bracket_ranges(
+ multi_buffer_snapshot: &MultiBufferSnapshot,
buffer_snapshot: &BufferSnapshot,
- buffer_range: Range<usize>,
+ buffer_range: Range<BufferOffset>,
+ excerpt_range: ExcerptRange<text::Anchor>,
fetched_chunks: &mut HashSet<Range<BufferRow>>,
- excerpt_id: ExcerptId,
accents_count: usize,
- anchors_in_multi_buffer: &impl Fn(ExcerptId, [text::Anchor; 4]) -> Option<[Option<Anchor>; 4]>,
) -> Vec<(usize, Vec<Range<Anchor>>)> {
+ let context = excerpt_range.context.to_offset(buffer_snapshot);
+
buffer_snapshot
- .fetch_bracket_ranges(buffer_range.start..buffer_range.end, Some(fetched_chunks))
+ .fetch_bracket_ranges(
+ buffer_range.start.0..buffer_range.end.0,
+ Some(fetched_chunks),
+ )
.into_iter()
.flat_map(|(chunk_range, pairs)| {
if fetched_chunks.insert(chunk_range) {
@@ -164,37 +163,25 @@ fn compute_bracket_ranges(
.filter_map(|pair| {
let color_index = pair.color_index?;
- let buffer_open_range = buffer_snapshot.anchor_range_around(pair.open_range);
- let buffer_close_range = buffer_snapshot.anchor_range_around(pair.close_range);
- let [
- buffer_open_range_start,
- buffer_open_range_end,
- buffer_close_range_start,
- buffer_close_range_end,
- ] = anchors_in_multi_buffer(
- excerpt_id,
- [
- buffer_open_range.start,
- buffer_open_range.end,
- buffer_close_range.start,
- buffer_close_range.end,
- ],
- )?;
- let multi_buffer_open_range = buffer_open_range_start.zip(buffer_open_range_end);
- let multi_buffer_close_range = buffer_close_range_start.zip(buffer_close_range_end);
+ let mut ranges = Vec::new();
- let mut ranges = Vec::with_capacity(2);
- if let Some((open_start, open_end)) = multi_buffer_open_range {
- ranges.push(open_start..open_end);
- }
- if let Some((close_start, close_end)) = multi_buffer_close_range {
- ranges.push(close_start..close_end);
- }
- if ranges.is_empty() {
- None
- } else {
- Some((color_index % accents_count, ranges))
- }
+ if context.start <= pair.open_range.start && pair.open_range.end <= context.end {
+ let anchors = buffer_snapshot.anchor_range_inside(pair.open_range);
+ ranges.push(
+ multi_buffer_snapshot.anchor_in_buffer(anchors.start)?
+ ..multi_buffer_snapshot.anchor_in_buffer(anchors.end)?,
+ );
+ };
+
+ if context.start <= pair.close_range.start && pair.close_range.end <= context.end {
+ let anchors = buffer_snapshot.anchor_range_inside(pair.close_range);
+ ranges.push(
+ multi_buffer_snapshot.anchor_in_buffer(anchors.start)?
+ ..multi_buffer_snapshot.anchor_in_buffer(anchors.end)?,
+ );
+ };
+
+ Some((color_index % accents_count, ranges))
})
.collect()
}
@@ -1197,7 +1184,7 @@ mod foo «1{
);
}
- let buffer_snapshot = snapshot.buffer().as_singleton().unwrap().2;
+ let buffer_snapshot = snapshot.buffer().as_singleton().unwrap();
for bracket_match in buffer_snapshot
.fetch_bracket_ranges(
snapshot
@@ -1464,6 +1451,101 @@ mod foo «1{
);
}
+ #[gpui::test]
+ async fn test_multi_buffer_close_excerpts(cx: &mut gpui::TestAppContext) {
+ let comment_lines = 5;
+
+ init_test(cx, |language_settings| {
+ language_settings.defaults.colorize_brackets = Some(true);
+ });
+ let fs = FakeFs::new(cx.background_executor.clone());
+ fs.insert_tree(
+ path!("/a"),
+ json!({
+ "lib.rs": separate_with_comment_lines(
+ indoc! {r#"
+ fn process_data_1() {
+ let map: Option<Vec<()>> = None;
+ }
+ "#},
+ indoc! {r#"
+ fn process_data_2() {
+ let other_map: Option<Vec<()>> = None;
+ }
+ "#},
+ comment_lines,
+ )
+ }),
+ )
+ .await;
+
+ let project = Project::test(fs, [path!("/a").as_ref()], cx).await;
+ let language_registry = project.read_with(cx, |project, _| project.languages().clone());
+ language_registry.add(rust_lang());
+
+ let buffer_1 = project
+ .update(cx, |project, cx| {
+ project.open_local_buffer(path!("/a/lib.rs"), cx)
+ })
+ .await
+ .unwrap();
+
+ let second_excerpt_start = buffer_1.read_with(cx, |buffer, _| {
+ let text = buffer.text();
+ text.lines()
+ .enumerate()
+ .find(|(_, line)| line.contains("process_data_2"))
+ .map(|(row, _)| row as u32)
+ .unwrap()
+ });
+
+ let multi_buffer = cx.new(|cx| {
+ let mut multi_buffer = MultiBuffer::new(Capability::ReadWrite);
+ multi_buffer.set_excerpts_for_path(
+ PathKey::sorted(0),
+ buffer_1.clone(),
+ [
+ Point::new(0, 0)..Point::new(3, 0),
+ Point::new(second_excerpt_start, 0)..Point::new(second_excerpt_start + 3, 0),
+ ],
+ 0,
+ cx,
+ );
+ multi_buffer
+ });
+
+ let editor = cx.add_window(|window, cx| {
+ Editor::for_multibuffer(multi_buffer, Some(project.clone()), window, cx)
+ });
+ cx.executor().advance_clock(Duration::from_millis(100));
+ cx.executor().run_until_parked();
+
+ let editor_snapshot = editor
+ .update(cx, |editor, window, cx| editor.snapshot(window, cx))
+ .unwrap();
+ assert_eq!(
+ concat!(
+ "\n",
+ "\n",
+ "fn process_data_1\u{00ab}1()1\u{00bb} \u{00ab}1{\n",
+ " let map: Option\u{00ab}2<Vec\u{00ab}3<\u{00ab}4()4\u{00bb}>3\u{00bb}>2\u{00bb} = None;\n",
+ "}1\u{00bb}\n",
+ "\n",
+ "\n",
+ "fn process_data_2\u{00ab}1()1\u{00bb} \u{00ab}1{\n",
+ " let other_map: Option\u{00ab}2<Vec\u{00ab}3<\u{00ab}4()4\u{00bb}>3\u{00bb}>2\u{00bb} = None;\n",
+ "}1\u{00bb}\n",
+ "\n",
+ "1 hsla(207.80, 16.20%, 69.19%, 1.00)\n",
+ "2 hsla(29.00, 54.00%, 65.88%, 1.00)\n",
+ "3 hsla(286.00, 51.00%, 75.25%, 1.00)\n",
+ "4 hsla(187.00, 47.00%, 59.22%, 1.00)\n",
+ ),
+ &editor_bracket_colors_markup(&editor_snapshot),
+ "Two close excerpts from the same buffer (within same tree-sitter chunk) should both have bracket colors"
+ );
+ }
+
#[gpui::test]
// reproduction of #47846
async fn test_bracket_colorization_with_folds(cx: &mut gpui::TestAppContext) {
@@ -7,7 +7,7 @@ use project::{Completion, CompletionSource};
use settings::SnippetSortOrder;
use std::sync::Arc;
use std::sync::atomic::AtomicBool;
-use text::Anchor;
+use text::{Anchor, BufferId};
#[gpui::test]
async fn test_sort_kind(cx: &mut TestAppContext) {
@@ -393,7 +393,7 @@ impl CompletionBuilder {
kind: Option<CompletionItemKind>,
) -> Completion {
Completion {
- replace_range: Anchor::MIN..Anchor::MAX,
+ replace_range: Anchor::min_max_range_for_buffer(BufferId::new(1).unwrap()),
new_text: label.to_string(),
label: CodeLabel::plain(label.to_string(), filter_text),
documentation: None,
@@ -10,7 +10,7 @@ use language::CodeLabel;
use language::{Buffer, LanguageName, LanguageRegistry};
use lsp::CompletionItemTag;
use markdown::{CopyButtonVisibility, Markdown, MarkdownElement};
-use multi_buffer::{Anchor, ExcerptId};
+use multi_buffer::Anchor;
use ordered_float::OrderedFloat;
use project::lsp_store::CompletionDocumentation;
use project::{CodeAction, Completion, TaskSourceKind};
@@ -357,7 +357,8 @@ impl CompletionsMenu {
id: CompletionId,
sort_completions: bool,
choices: &Vec<String>,
- selection: Range<Anchor>,
+ initial_position: Anchor,
+ selection: Range<text::Anchor>,
buffer: Entity<Buffer>,
scroll_handle: Option<UniformListScrollHandle>,
snippet_sort_order: SnippetSortOrder,
@@ -365,7 +366,7 @@ impl CompletionsMenu {
let completions = choices
.iter()
.map(|choice| Completion {
- replace_range: selection.start.text_anchor..selection.end.text_anchor,
+ replace_range: selection.clone(),
new_text: choice.to_string(),
label: CodeLabel::plain(choice.to_string(), None),
match_start: None,
@@ -400,7 +401,7 @@ impl CompletionsMenu {
id,
source: CompletionsMenuSource::SnippetChoices,
sort_completions,
- initial_position: selection.start,
+ initial_position,
initial_query: None,
is_incomplete: false,
buffer,
@@ -1380,7 +1381,6 @@ impl CompletionsMenu {
#[derive(Clone)]
pub struct AvailableCodeAction {
- pub excerpt_id: ExcerptId,
pub action: CodeAction,
pub provider: Rc<dyn CodeActionProvider>,
}
@@ -1433,7 +1433,6 @@ impl CodeActionContents {
})
.chain(self.actions.iter().flat_map(|actions| {
actions.iter().map(|available| CodeActionsItem::CodeAction {
- excerpt_id: available.excerpt_id,
action: available.action.clone(),
provider: available.provider.clone(),
})
@@ -1457,7 +1456,6 @@ impl CodeActionContents {
if let Some(actions) = &self.actions {
if let Some(available) = actions.get(index) {
return Some(CodeActionsItem::CodeAction {
- excerpt_id: available.excerpt_id,
action: available.action.clone(),
provider: available.provider.clone(),
});
@@ -1477,7 +1475,6 @@ impl CodeActionContents {
pub enum CodeActionsItem {
Task(TaskSourceKind, ResolvedTask),
CodeAction {
- excerpt_id: ExcerptId,
action: CodeAction,
provider: Rc<dyn CodeActionProvider>,
},
@@ -103,7 +103,7 @@ use language::{
};
use multi_buffer::{
- Anchor, AnchorRangeExt, ExcerptId, MultiBuffer, MultiBufferOffset, MultiBufferOffsetUtf16,
+ Anchor, AnchorRangeExt, MultiBuffer, MultiBufferOffset, MultiBufferOffsetUtf16,
MultiBufferPoint, MultiBufferRow, MultiBufferSnapshot, RowInfo, ToOffset, ToPoint,
};
use project::project_settings::DiagnosticSeverity;
@@ -125,7 +125,7 @@ use std::{
fmt::Debug,
iter,
num::NonZeroU32,
- ops::{self, Add, Bound, Range, Sub},
+ ops::{self, Add, Range, Sub},
sync::Arc,
};
@@ -195,10 +195,9 @@ pub struct CompanionExcerptPatch {
}
pub type ConvertMultiBufferRows = fn(
- &HashMap<ExcerptId, ExcerptId>,
&MultiBufferSnapshot,
&MultiBufferSnapshot,
- (Bound<MultiBufferPoint>, Bound<MultiBufferPoint>),
+ Range<MultiBufferPoint>,
) -> Vec<CompanionExcerptPatch>;
/// Decides how text in a [`MultiBuffer`] should be displayed in a buffer, handling inlay hints,
@@ -240,8 +239,6 @@ pub(crate) struct Companion {
rhs_display_map_id: EntityId,
rhs_buffer_to_lhs_buffer: HashMap<BufferId, BufferId>,
lhs_buffer_to_rhs_buffer: HashMap<BufferId, BufferId>,
- rhs_excerpt_to_lhs_excerpt: HashMap<ExcerptId, ExcerptId>,
- lhs_excerpt_to_rhs_excerpt: HashMap<ExcerptId, ExcerptId>,
rhs_rows_to_lhs_rows: ConvertMultiBufferRows,
lhs_rows_to_rhs_rows: ConvertMultiBufferRows,
rhs_custom_block_to_balancing_block: RefCell<HashMap<CustomBlockId, CustomBlockId>>,
@@ -258,8 +255,6 @@ impl Companion {
rhs_display_map_id,
rhs_buffer_to_lhs_buffer: Default::default(),
lhs_buffer_to_rhs_buffer: Default::default(),
- rhs_excerpt_to_lhs_excerpt: Default::default(),
- lhs_excerpt_to_rhs_excerpt: Default::default(),
rhs_rows_to_lhs_rows,
lhs_rows_to_rhs_rows,
rhs_custom_block_to_balancing_block: Default::default(),
@@ -287,14 +282,14 @@ impl Companion {
display_map_id: EntityId,
companion_snapshot: &MultiBufferSnapshot,
our_snapshot: &MultiBufferSnapshot,
- bounds: (Bound<MultiBufferPoint>, Bound<MultiBufferPoint>),
+ bounds: Range<MultiBufferPoint>,
) -> Vec<CompanionExcerptPatch> {
- let (excerpt_map, convert_fn) = if self.is_rhs(display_map_id) {
- (&self.rhs_excerpt_to_lhs_excerpt, self.rhs_rows_to_lhs_rows)
+ let convert_fn = if self.is_rhs(display_map_id) {
+ self.rhs_rows_to_lhs_rows
} else {
- (&self.lhs_excerpt_to_rhs_excerpt, self.lhs_rows_to_rhs_rows)
+ self.lhs_rows_to_rhs_rows
};
- convert_fn(excerpt_map, companion_snapshot, our_snapshot, bounds)
+ convert_fn(companion_snapshot, our_snapshot, bounds)
}
pub(crate) fn convert_point_from_companion(
@@ -304,20 +299,15 @@ impl Companion {
companion_snapshot: &MultiBufferSnapshot,
point: MultiBufferPoint,
) -> Range<MultiBufferPoint> {
- let (excerpt_map, convert_fn) = if self.is_rhs(display_map_id) {
- (&self.lhs_excerpt_to_rhs_excerpt, self.lhs_rows_to_rhs_rows)
+ let convert_fn = if self.is_rhs(display_map_id) {
+ self.lhs_rows_to_rhs_rows
} else {
- (&self.rhs_excerpt_to_lhs_excerpt, self.rhs_rows_to_lhs_rows)
+ self.rhs_rows_to_lhs_rows
};
- let excerpt = convert_fn(
- excerpt_map,
- our_snapshot,
- companion_snapshot,
- (Bound::Included(point), Bound::Included(point)),
- )
- .into_iter()
- .next();
+ let excerpt = convert_fn(our_snapshot, companion_snapshot, point..point)
+ .into_iter()
+ .next();
let Some(excerpt) = excerpt else {
return Point::zero()..our_snapshot.max_point();
@@ -332,20 +322,15 @@ impl Companion {
companion_snapshot: &MultiBufferSnapshot,
point: MultiBufferPoint,
) -> Range<MultiBufferPoint> {
- let (excerpt_map, convert_fn) = if self.is_rhs(display_map_id) {
- (&self.rhs_excerpt_to_lhs_excerpt, self.rhs_rows_to_lhs_rows)
+ let convert_fn = if self.is_rhs(display_map_id) {
+ self.rhs_rows_to_lhs_rows
} else {
- (&self.lhs_excerpt_to_rhs_excerpt, self.lhs_rows_to_rhs_rows)
+ self.lhs_rows_to_rhs_rows
};
- let excerpt = convert_fn(
- excerpt_map,
- companion_snapshot,
- our_snapshot,
- (Bound::Included(point), Bound::Included(point)),
- )
- .into_iter()
- .next();
+ let excerpt = convert_fn(companion_snapshot, our_snapshot, point..point)
+ .into_iter()
+ .next();
let Some(excerpt) = excerpt else {
return Point::zero()..companion_snapshot.max_point();
@@ -353,30 +338,6 @@ impl Companion {
excerpt.patch.edit_for_old_position(point).new
}
- pub(crate) fn companion_excerpt_to_excerpt(
- &self,
- display_map_id: EntityId,
- ) -> &HashMap<ExcerptId, ExcerptId> {
- if self.is_rhs(display_map_id) {
- &self.lhs_excerpt_to_rhs_excerpt
- } else {
- &self.rhs_excerpt_to_lhs_excerpt
- }
- }
-
- #[cfg(test)]
- pub(crate) fn excerpt_mappings(
- &self,
- ) -> (
- &HashMap<ExcerptId, ExcerptId>,
- &HashMap<ExcerptId, ExcerptId>,
- ) {
- (
- &self.lhs_excerpt_to_rhs_excerpt,
- &self.rhs_excerpt_to_lhs_excerpt,
- )
- }
-
fn buffer_to_companion_buffer(&self, display_map_id: EntityId) -> &HashMap<BufferId, BufferId> {
if self.is_rhs(display_map_id) {
&self.rhs_buffer_to_lhs_buffer
@@ -385,24 +346,6 @@ impl Companion {
}
}
- pub(crate) fn add_excerpt_mapping(&mut self, lhs_id: ExcerptId, rhs_id: ExcerptId) {
- self.lhs_excerpt_to_rhs_excerpt.insert(lhs_id, rhs_id);
- self.rhs_excerpt_to_lhs_excerpt.insert(rhs_id, lhs_id);
- }
-
- pub(crate) fn remove_excerpt_mappings(
- &mut self,
- lhs_ids: impl IntoIterator<Item = ExcerptId>,
- rhs_ids: impl IntoIterator<Item = ExcerptId>,
- ) {
- for id in lhs_ids {
- self.lhs_excerpt_to_rhs_excerpt.remove(&id);
- }
- for id in rhs_ids {
- self.rhs_excerpt_to_lhs_excerpt.remove(&id);
- }
- }
-
pub(crate) fn lhs_to_rhs_buffer(&self, lhs_buffer_id: BufferId) -> Option<BufferId> {
self.lhs_buffer_to_rhs_buffer.get(&lhs_buffer_id).copied()
}
@@ -457,10 +400,13 @@ impl DisplayMap {
diagnostics_max_severity: DiagnosticSeverity,
cx: &mut Context<Self>,
) -> Self {
- let buffer_subscription = buffer.update(cx, |buffer, _| buffer.subscribe());
-
let tab_size = Self::tab_size(&buffer, cx);
+ // Important: obtain the snapshot BEFORE creating the subscription.
+ // snapshot() may call sync() which publishes edits. If we subscribe first,
+ // those edits would be captured but the InlayMap would already be at the
+ // post-edit state, causing a desync.
let buffer_snapshot = buffer.read(cx).snapshot(cx);
+ let buffer_subscription = buffer.update(cx, |buffer, _| buffer.subscribe());
let crease_map = CreaseMap::new(&buffer_snapshot);
let (inlay_map, snapshot) = InlayMap::new(buffer_snapshot);
let (fold_map, snapshot) = FoldMap::new(snapshot);
@@ -540,8 +486,7 @@ impl DisplayMap {
.wrap_map
.update(cx, |wrap_map, cx| wrap_map.sync(snapshot, edits, cx));
- let (snapshot, edits) =
- writer.unfold_intersecting([Anchor::min()..Anchor::max()], true);
+ let (snapshot, edits) = writer.unfold_intersecting([Anchor::Min..Anchor::Max], true);
let (snapshot, edits) = self.tab_map.sync(snapshot, edits, tab_size);
let (snapshot, _edits) = self
.wrap_map
@@ -632,18 +577,6 @@ impl DisplayMap {
self.companion.as_ref().map(|(_, c)| c)
}
- pub(crate) fn companion_excerpt_to_my_excerpt(
- &self,
- their_id: ExcerptId,
- cx: &App,
- ) -> Option<ExcerptId> {
- let (_, companion) = self.companion.as_ref()?;
- let c = companion.read(cx);
- c.companion_excerpt_to_excerpt(self.entity_id)
- .get(&their_id)
- .copied()
- }
-
fn sync_through_wrap(&mut self, cx: &mut App) -> (WrapSnapshot, WrapPatch) {
let tab_size = Self::tab_size(&self.buffer, cx);
let buffer_snapshot = self.buffer.read(cx).snapshot(cx);
@@ -1054,17 +987,10 @@ impl DisplayMap {
return;
}
- let excerpt_ids = snapshot
- .excerpts()
- .filter(|(_, buf, _)| buf.remote_id() == buffer_id)
- .map(|(id, _, _)| id)
- .collect::<Vec<_>>();
-
let base_placeholder = self.fold_placeholder.clone();
let creases = ranges.into_iter().filter_map(|folding_range| {
- let mb_range = excerpt_ids.iter().find_map(|&id| {
- snapshot.anchor_range_in_excerpt(id, folding_range.range.clone())
- })?;
+ let mb_range =
+ snapshot.buffer_anchor_range_to_anchor_range(folding_range.range.clone())?;
let placeholder = if let Some(collapsed_text) = folding_range.collapsed_text {
FoldPlaceholder {
render: Arc::new({
@@ -4156,4 +4082,64 @@ pub mod tests {
chunks,
);
}
+
+ /// Regression test: Creating a DisplayMap when the MultiBuffer has pending
+ /// unsynced changes should not cause a desync between the subscription edits
+ /// and the InlayMap's buffer state.
+ ///
+ /// The bug occurred because:
+ /// 1. DisplayMap::new created a subscription first
+ /// 2. Then called snapshot() which synced and published edits
+ /// 3. InlayMap was created with the post-sync snapshot
+ /// 4. But the subscription captured the sync edits, leading to double-application
+ #[gpui::test]
+ fn test_display_map_subscription_ordering(cx: &mut gpui::App) {
+ init_test(cx, &|_| {});
+
+ // Create a buffer with some initial text
+ let buffer = cx.new(|cx| Buffer::local("initial", cx));
+ let multibuffer = cx.new(|cx| MultiBuffer::singleton(buffer.clone(), cx));
+
+ // Edit the buffer. This sets buffer_changed_since_sync = true.
+ // Importantly, do NOT call multibuffer.snapshot() yet.
+ buffer.update(cx, |buffer, cx| {
+ buffer.edit([(0..0, "prefix ")], None, cx);
+ });
+
+ // Create the DisplayMap. In the buggy code, this would:
+ // 1. Create subscription (empty)
+ // 2. Call snapshot() which syncs and publishes edits E1
+ // 3. Create InlayMap with post-E1 snapshot
+ // 4. Subscription now has E1, but InlayMap is already at post-E1 state
+ let map = cx.new(|cx| {
+ DisplayMap::new(
+ multibuffer.clone(),
+ font("Helvetica"),
+ px(14.0),
+ None,
+ 1,
+ 1,
+ FoldPlaceholder::test(),
+ DiagnosticSeverity::Warning,
+ cx,
+ )
+ });
+
+ // Verify initial state is correct
+ let snapshot = map.update(cx, |map, cx| map.snapshot(cx));
+ assert_eq!(snapshot.text(), "prefix initial");
+
+ // Make another edit
+ buffer.update(cx, |buffer, cx| {
+ buffer.edit([(7..7, "more ")], None, cx);
+ });
+
+ // This would crash in the buggy code because:
+ // - InlayMap expects edits from V1 to V2
+ // - But subscription has E1 ∘ E2 (from V0 to V2)
+ // - The calculation `buffer_edit.new.end + (cursor.end().0 - buffer_edit.old.end)`
+ // would produce an offset exceeding the buffer length
+ let snapshot = map.update(cx, |map, cx| map.snapshot(cx));
+ assert_eq!(snapshot.text(), "prefix more initial");
+ }
}
@@ -11,8 +11,8 @@ use collections::{Bound, HashMap, HashSet};
use gpui::{AnyElement, App, EntityId, Pixels, Window};
use language::{Patch, Point};
use multi_buffer::{
- Anchor, ExcerptId, ExcerptInfo, MultiBuffer, MultiBufferOffset, MultiBufferPoint,
- MultiBufferRow, MultiBufferSnapshot, RowInfo, ToOffset, ToPoint as _,
+ Anchor, ExcerptBoundaryInfo, MultiBuffer, MultiBufferOffset, MultiBufferPoint, MultiBufferRow,
+ MultiBufferSnapshot, RowInfo, ToOffset, ToPoint as _,
};
use parking_lot::Mutex;
use std::{
@@ -298,10 +298,10 @@ pub struct BlockContext<'a, 'b> {
pub indent_guide_padding: Pixels,
}
-#[derive(Debug, Clone, Copy, PartialEq, Eq, Ord, PartialOrd, Hash)]
+#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
pub enum BlockId {
- ExcerptBoundary(ExcerptId),
- FoldedBuffer(ExcerptId),
+ ExcerptBoundary(Anchor),
+ FoldedBuffer(BufferId),
Custom(CustomBlockId),
Spacer(SpacerId),
}
@@ -310,10 +310,8 @@ impl From<BlockId> for ElementId {
fn from(value: BlockId) -> Self {
match value {
BlockId::Custom(CustomBlockId(id)) => ("Block", id).into(),
- BlockId::ExcerptBoundary(excerpt_id) => {
- ("ExcerptBoundary", EntityId::from(excerpt_id)).into()
- }
- BlockId::FoldedBuffer(id) => ("FoldedBuffer", EntityId::from(id)).into(),
+ BlockId::ExcerptBoundary(anchor) => anchor.opaque_id().unwrap().into(),
+ BlockId::FoldedBuffer(id) => ("FoldedBuffer", EntityId::from(id.to_proto())).into(),
BlockId::Spacer(SpacerId(id)) => ("Spacer", id).into(),
}
}
@@ -323,7 +321,7 @@ impl std::fmt::Display for BlockId {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
match self {
Self::Custom(id) => write!(f, "Block({id:?})"),
- Self::ExcerptBoundary(id) => write!(f, "ExcerptHeader({id:?})"),
+ Self::ExcerptBoundary(id) => write!(f, "ExcerptBoundary({id:?})"),
Self::FoldedBuffer(id) => write!(f, "FoldedBuffer({id:?})"),
Self::Spacer(id) => write!(f, "Spacer({id:?})"),
}
@@ -340,15 +338,15 @@ struct Transform {
pub enum Block {
Custom(Arc<CustomBlock>),
FoldedBuffer {
- first_excerpt: ExcerptInfo,
+ first_excerpt: ExcerptBoundaryInfo,
height: u32,
},
ExcerptBoundary {
- excerpt: ExcerptInfo,
+ excerpt: ExcerptBoundaryInfo,
height: u32,
},
BufferHeader {
- excerpt: ExcerptInfo,
+ excerpt: ExcerptBoundaryInfo,
height: u32,
},
Spacer {
@@ -365,12 +363,14 @@ impl Block {
Block::ExcerptBoundary {
excerpt: next_excerpt,
..
- } => BlockId::ExcerptBoundary(next_excerpt.id),
- Block::FoldedBuffer { first_excerpt, .. } => BlockId::FoldedBuffer(first_excerpt.id),
+ } => BlockId::ExcerptBoundary(next_excerpt.start_anchor),
+ Block::FoldedBuffer { first_excerpt, .. } => {
+ BlockId::FoldedBuffer(first_excerpt.buffer_id())
+ }
Block::BufferHeader {
excerpt: next_excerpt,
..
- } => BlockId::ExcerptBoundary(next_excerpt.id),
+ } => BlockId::ExcerptBoundary(next_excerpt.start_anchor),
Block::Spacer { id, .. } => BlockId::Spacer(*id),
}
}
@@ -1174,10 +1174,10 @@ impl BlockMap {
let wrap_row = wrap_row_for(Point::new(excerpt_boundary.row.0, 0), Bias::Left);
let new_buffer_id = match (&excerpt_boundary.prev, &excerpt_boundary.next) {
- (None, next) => Some(next.buffer_id),
+ (None, next) => Some(next.buffer_id()),
(Some(prev), next) => {
- if prev.buffer_id != next.buffer_id {
- Some(next.buffer_id)
+ if prev.buffer_id() != next.buffer_id() {
+ Some(next.buffer_id())
} else {
None
}
@@ -1195,7 +1195,7 @@ impl BlockMap {
let mut last_excerpt_end_row = first_excerpt.end_row;
while let Some(next_boundary) = boundaries.peek() {
- if next_boundary.next.buffer_id == new_buffer_id {
+ if next_boundary.next.buffer_id() == new_buffer_id {
last_excerpt_end_row = next_boundary.next.end_row;
} else {
break;
@@ -1254,12 +1254,24 @@ impl BlockMap {
let our_buffer = wrap_snapshot.buffer_snapshot();
let companion_buffer = companion_snapshot.buffer_snapshot();
- let patches = companion.convert_rows_to_companion(
+ let range = match bounds {
+ (Bound::Included(start), Bound::Excluded(end)) => start..end,
+ (Bound::Included(start), Bound::Unbounded) => start..wrap_snapshot.buffer().max_point(),
+ _ => unreachable!(),
+ };
+ let mut patches = companion.convert_rows_to_companion(
display_map_id,
companion_buffer,
our_buffer,
- bounds,
+ range,
);
+ if let Some(patch) = patches.last()
+ && let Bound::Excluded(end) = bounds.1
+ && end == wrap_snapshot.buffer().max_point()
+ && patch.source_excerpt_range.is_empty()
+ {
+ patches.pop();
+ }
let mut our_inlay_point_cursor = wrap_snapshot.inlay_point_cursor();
let mut our_fold_point_cursor = wrap_snapshot.fold_point_cursor();
@@ -1391,18 +1403,15 @@ impl BlockMap {
}
}
- // Main loop: process one hunk/group at a time, possibly inserting spacers before and after.
while let Some(source_point) = source_points.next() {
let mut current_boundary = source_point;
let current_range = excerpt.patch.edit_for_old_position(current_boundary).new;
- // This can only occur at the end of an excerpt.
if current_boundary.column > 0 {
debug_assert_eq!(current_boundary, excerpt.source_excerpt_range.end);
break;
}
- // Align the two sides at the start of this group.
let (delta_at_start, mut spacer_at_start) = determine_spacer(
&mut our_wrapper,
&mut companion_wrapper,
@@ -1434,7 +1443,6 @@ impl BlockMap {
source_points.next();
}
- // This can only occur at the end of an excerpt.
if current_boundary.column > 0 {
debug_assert_eq!(current_boundary, excerpt.source_excerpt_range.end);
break;
@@ -1538,7 +1546,8 @@ impl BlockMap {
| Block::BufferHeader {
excerpt: excerpt_b, ..
},
- ) => Some(excerpt_a.id).cmp(&Some(excerpt_b.id)),
+ ) => Some(excerpt_a.start_text_anchor().opaque_id())
+ .cmp(&Some(excerpt_b.start_text_anchor().opaque_id())),
(
Block::ExcerptBoundary { .. } | Block::BufferHeader { .. },
Block::Spacer { .. } | Block::Custom(_),
@@ -2034,6 +2043,7 @@ impl BlockMapWriter<'_> {
multi_buffer: &MultiBuffer,
cx: &App,
) {
+ let multi_buffer_snapshot = multi_buffer.snapshot(cx);
let mut ranges = Vec::new();
let mut companion_buffer_ids = HashSet::default();
for buffer_id in buffer_ids {
@@ -2042,7 +2052,7 @@ impl BlockMapWriter<'_> {
} else {
self.block_map.folded_buffers.remove(&buffer_id);
}
- ranges.extend(multi_buffer.excerpt_ranges_for_buffer(buffer_id, cx));
+ ranges.extend(multi_buffer_snapshot.range_for_buffer(buffer_id));
if let Some(companion) = &self.companion
&& companion.inverse.is_some()
{
@@ -2268,14 +2278,16 @@ impl BlockSnapshot {
let custom_block = self.custom_blocks_by_id.get(&custom_block_id)?;
return Some(Block::Custom(custom_block.clone()));
}
- BlockId::ExcerptBoundary(next_excerpt_id) => {
- let excerpt_range = buffer.range_for_excerpt(next_excerpt_id)?;
- self.wrap_snapshot
- .make_wrap_point(excerpt_range.start, Bias::Left)
+ BlockId::ExcerptBoundary(start_anchor) => {
+ let start_point = start_anchor.to_point(&buffer);
+ self.wrap_snapshot.make_wrap_point(start_point, Bias::Left)
}
- BlockId::FoldedBuffer(excerpt_id) => self
- .wrap_snapshot
- .make_wrap_point(buffer.range_for_excerpt(excerpt_id)?.start, Bias::Left),
+ BlockId::FoldedBuffer(buffer_id) => self.wrap_snapshot.make_wrap_point(
+ buffer
+ .anchor_in_excerpt(buffer.excerpts_for_buffer(buffer_id).next()?.context.start)?
+ .to_point(buffer),
+ Bias::Left,
+ ),
BlockId::Spacer(_) => return None,
};
let wrap_row = wrap_point.row();
@@ -2571,7 +2583,7 @@ impl BlockChunks<'_> {
}
pub struct StickyHeaderExcerpt<'a> {
- pub excerpt: &'a ExcerptInfo,
+ pub excerpt: &'a ExcerptBoundaryInfo,
}
impl<'a> Iterator for BlockChunks<'a> {
@@ -3096,7 +3108,13 @@ mod tests {
);
multi_buffer
});
- let excerpt_ids = multi_buffer.read_with(cx, |mb, _| mb.excerpt_ids());
+ let excerpt_start_anchors = multi_buffer.read_with(cx, |mb, _| {
+ let snapshot = mb.snapshot(cx);
+ snapshot
+ .excerpts()
+ .map(|e| snapshot.anchor_in_excerpt(e.context.start).unwrap())
+ .collect::<Vec<_>>()
+ });
let font = test_font();
let font_size = px(14.);
@@ -3129,9 +3147,9 @@ mod tests {
assert_eq!(
blocks,
vec![
- (0..1, BlockId::ExcerptBoundary(excerpt_ids[0])), // path, header
- (3..4, BlockId::ExcerptBoundary(excerpt_ids[1])), // path, header
- (6..7, BlockId::ExcerptBoundary(excerpt_ids[2])), // path, header
+ (0..1, BlockId::ExcerptBoundary(excerpt_start_anchors[0])), // path, header
+ (3..4, BlockId::ExcerptBoundary(excerpt_start_anchors[1])), // path, header
+ (6..7, BlockId::ExcerptBoundary(excerpt_start_anchors[2])), // path, header
]
);
}
@@ -3447,13 +3465,13 @@ mod tests {
],
cx,
);
- assert_eq!(multibuffer.read(cx).excerpt_ids().len(), 6);
+ assert_eq!(multibuffer.read(cx).snapshot(cx).excerpts().count(), 6);
multibuffer
});
let buffer_snapshot = cx.update(|cx| buffer.read(cx).snapshot(cx));
let buffer_ids = buffer_snapshot
.excerpts()
- .map(|(_, buffer_snapshot, _)| buffer_snapshot.remote_id())
+ .map(|excerpt| excerpt.context.start.buffer_id)
.dedup()
.collect::<Vec<_>>();
assert_eq!(buffer_ids.len(), 3);
@@ -3800,7 +3818,7 @@ mod tests {
let buffer_snapshot = cx.update(|cx| buffer.read(cx).snapshot(cx));
let buffer_ids = buffer_snapshot
.excerpts()
- .map(|(_, buffer_snapshot, _)| buffer_snapshot.remote_id())
+ .map(|excerpt| excerpt.context.start.buffer_id)
.dedup()
.collect::<Vec<_>>();
assert_eq!(buffer_ids.len(), 1);
@@ -4008,17 +4026,16 @@ mod tests {
wrap_map.sync(tab_snapshot, tab_edits, cx)
});
let mut block_map = block_map.write(wraps_snapshot, wrap_edits, None);
- let (unfolded_buffers, folded_buffers) = buffer.read_with(cx, |buffer, _| {
- let folded_buffers: Vec<_> =
- block_map.block_map.folded_buffers.iter().cloned().collect();
- let mut unfolded_buffers = buffer.excerpt_buffer_ids();
- unfolded_buffers.dedup();
- log::debug!("All buffers {unfolded_buffers:?}");
- log::debug!("Folded buffers {folded_buffers:?}");
- unfolded_buffers.retain(|buffer_id| {
- !block_map.block_map.folded_buffers.contains(buffer_id)
- });
- (unfolded_buffers, folded_buffers)
+ let folded_buffers: Vec<_> =
+ block_map.block_map.folded_buffers.iter().cloned().collect();
+ let mut unfolded_buffers = buffer_snapshot
+ .buffer_ids_for_range(Anchor::Min..Anchor::Max)
+ .collect::<Vec<_>>();
+ unfolded_buffers.dedup();
+ log::debug!("All buffers {unfolded_buffers:?}");
+ log::debug!("Folded buffers {folded_buffers:?}");
+ unfolded_buffers.retain(|buffer_id| {
+ !block_map.block_map.folded_buffers.contains(buffer_id)
});
let mut folded_count = folded_buffers.len();
let mut unfolded_count = unfolded_buffers.len();
@@ -4039,12 +4056,14 @@ mod tests {
log::info!("Folding {buffer_to_fold:?}");
let related_excerpts = buffer_snapshot
.excerpts()
- .filter_map(|(excerpt_id, buffer, range)| {
- if buffer.remote_id() == buffer_to_fold {
+ .filter_map(|excerpt| {
+ if excerpt.context.start.buffer_id == buffer_to_fold {
Some((
- excerpt_id,
- buffer
- .text_for_range(range.context)
+ excerpt.context.start,
+ buffer_snapshot
+ .buffer_for_id(buffer_to_fold)
+ .unwrap()
+ .text_for_range(excerpt.context)
.collect::<String>(),
))
} else {
@@ -4518,7 +4537,7 @@ mod tests {
let buffer_snapshot = cx.update(|cx| buffer.read(cx).snapshot(cx));
let buffer_ids = buffer_snapshot
.excerpts()
- .map(|(_, buffer_snapshot, _)| buffer_snapshot.remote_id())
+ .map(|excerpt| excerpt.context.start.buffer_id)
.dedup()
.collect::<Vec<_>>();
assert_eq!(buffer_ids.len(), 1);
@@ -4563,7 +4582,7 @@ mod tests {
let buffer_snapshot = cx.update(|cx| buffer.read(cx).snapshot(cx));
let buffer_ids = buffer_snapshot
.excerpts()
- .map(|(_, buffer_snapshot, _)| buffer_snapshot.remote_id())
+ .map(|excerpt| excerpt.context.start.buffer_id)
.dedup()
.collect::<Vec<_>>();
assert_eq!(buffer_ids.len(), 1);
@@ -4635,11 +4654,6 @@ mod tests {
let subscription =
rhs_multibuffer.update(cx, |rhs_multibuffer, _| rhs_multibuffer.subscribe());
- let lhs_excerpt_id =
- lhs_multibuffer.read_with(cx, |mb, cx| mb.snapshot(cx).excerpts().next().unwrap().0);
- let rhs_excerpt_id =
- rhs_multibuffer.read_with(cx, |mb, cx| mb.snapshot(cx).excerpts().next().unwrap().0);
-
let lhs_buffer_snapshot = cx.update(|cx| lhs_multibuffer.read(cx).snapshot(cx));
let (mut _lhs_inlay_map, lhs_inlay_snapshot) = InlayMap::new(lhs_buffer_snapshot);
let (mut _lhs_fold_map, lhs_fold_snapshot) = FoldMap::new(lhs_inlay_snapshot);
@@ -4661,13 +4675,11 @@ mod tests {
let rhs_entity_id = rhs_multibuffer.entity_id();
let companion = cx.new(|_| {
- let mut c = Companion::new(
+ Companion::new(
rhs_entity_id,
convert_rhs_rows_to_lhs,
convert_lhs_rows_to_rhs,
- );
- c.add_excerpt_mapping(lhs_excerpt_id, rhs_excerpt_id);
- c
+ )
});
let rhs_edits = Patch::new(vec![text::Edit {
@@ -363,7 +363,7 @@ pub struct ItemSummary {
impl Default for ItemSummary {
fn default() -> Self {
Self {
- range: Anchor::min()..Anchor::min(),
+ range: Anchor::Min..Anchor::Min,
}
}
}
@@ -185,16 +185,18 @@ impl FoldMapWriter<'_> {
continue;
}
+ let fold_range = buffer.anchor_after(range.start)..buffer.anchor_before(range.end);
// For now, ignore any ranges that span an excerpt boundary.
- let fold_range =
- FoldRange(buffer.anchor_after(range.start)..buffer.anchor_before(range.end));
- if fold_range.0.start.excerpt_id != fold_range.0.end.excerpt_id {
+ if buffer
+ .anchor_range_to_buffer_anchor_range(fold_range.clone())
+ .is_none()
+ {
continue;
}
folds.push(Fold {
id: FoldId(post_inc(&mut self.0.next_fold_id.0)),
- range: fold_range,
+ range: FoldRange(fold_range),
placeholder: fold_text,
});
@@ -510,7 +512,7 @@ impl FoldMap {
.snapshot
.folds
.cursor::<FoldRange>(&inlay_snapshot.buffer);
- folds_cursor.seek(&FoldRange(anchor..Anchor::max()), Bias::Left);
+ folds_cursor.seek(&FoldRange(anchor..Anchor::Max), Bias::Left);
let mut folds = iter::from_fn({
let inlay_snapshot = &inlay_snapshot;
@@ -1226,7 +1228,7 @@ impl DerefMut for FoldRange {
impl Default for FoldRange {
fn default() -> Self {
- Self(Anchor::min()..Anchor::max())
+ Self(Anchor::Min..Anchor::Max)
}
}
@@ -1262,10 +1264,10 @@ pub struct FoldSummary {
impl Default for FoldSummary {
fn default() -> Self {
Self {
- start: Anchor::min(),
- end: Anchor::max(),
- min_start: Anchor::max(),
- max_end: Anchor::min(),
+ start: Anchor::Min,
+ end: Anchor::Max,
+ min_start: Anchor::Max,
+ max_end: Anchor::Min,
count: 0,
}
}
@@ -1342,7 +1342,7 @@ mod tests {
use settings::SettingsStore;
use std::{cmp::Reverse, env, sync::Arc};
use sum_tree::TreeMap;
- use text::{Patch, Rope};
+ use text::{BufferId, Patch, Rope};
use util::RandomCharIter;
use util::post_inc;
@@ -1351,10 +1351,10 @@ mod tests {
assert_eq!(
Inlay::hint(
InlayId::Hint(0),
- Anchor::min(),
+ Anchor::Min,
&InlayHint {
label: InlayHintLabel::String("a".to_string()),
- position: text::Anchor::MIN,
+ position: text::Anchor::min_for_buffer(BufferId::new(1).unwrap()),
padding_left: false,
padding_right: false,
tooltip: None,
@@ -1371,10 +1371,10 @@ mod tests {
assert_eq!(
Inlay::hint(
InlayId::Hint(0),
- Anchor::min(),
+ Anchor::Min,
&InlayHint {
label: InlayHintLabel::String("a".to_string()),
- position: text::Anchor::MIN,
+ position: text::Anchor::min_for_buffer(BufferId::new(1).unwrap()),
padding_left: true,
padding_right: true,
tooltip: None,
@@ -1391,10 +1391,10 @@ mod tests {
assert_eq!(
Inlay::hint(
InlayId::Hint(0),
- Anchor::min(),
+ Anchor::Min,
&InlayHint {
label: InlayHintLabel::String(" a ".to_string()),
- position: text::Anchor::MIN,
+ position: text::Anchor::min_for_buffer(BufferId::new(1).unwrap()),
padding_left: false,
padding_right: false,
tooltip: None,
@@ -1411,10 +1411,10 @@ mod tests {
assert_eq!(
Inlay::hint(
InlayId::Hint(0),
- Anchor::min(),
+ Anchor::Min,
&InlayHint {
label: InlayHintLabel::String(" a ".to_string()),
- position: text::Anchor::MIN,
+ position: text::Anchor::min_for_buffer(BufferId::new(1).unwrap()),
padding_left: true,
padding_right: true,
tooltip: None,
@@ -1434,10 +1434,10 @@ mod tests {
assert_eq!(
Inlay::hint(
InlayId::Hint(0),
- Anchor::min(),
+ Anchor::Min,
&InlayHint {
label: InlayHintLabel::String("🎨".to_string()),
- position: text::Anchor::MIN,
+ position: text::Anchor::min_for_buffer(BufferId::new(1).unwrap()),
padding_left: true,
padding_right: true,
tooltip: None,
@@ -8,7 +8,7 @@ use language::point_from_lsp;
use multi_buffer::Anchor;
use project::{DocumentColor, InlayId};
use settings::Settings as _;
-use text::{Bias, BufferId, OffsetRangeExt as _};
+use text::{Bias, BufferId};
use ui::{App, Context, Window};
use util::post_inc;
@@ -160,9 +160,9 @@ impl Editor {
}
let buffers_to_query = self
- .visible_excerpts(true, cx)
- .into_values()
- .map(|(buffer, ..)| buffer)
+ .visible_buffers(cx)
+ .into_iter()
+ .filter(|buffer| self.is_lsp_relevant(buffer.read(cx).file(), cx))
.chain(buffer_id.and_then(|buffer_id| self.buffer.read(cx).buffer(buffer_id)))
.filter(|editor_buffer| {
let editor_buffer_id = editor_buffer.read(cx).remote_id();
@@ -184,9 +184,9 @@ impl Editor {
buffers_to_query
.into_iter()
.filter_map(|buffer| {
- let buffer_id = buffer.read(cx).remote_id();
+ let buffer_snapshot = buffer.read(cx).snapshot();
let colors_task = lsp_store.document_colors(buffer, cx)?;
- Some(async move { (buffer_id, colors_task.await) })
+ Some(async move { (buffer_snapshot, colors_task.await) })
})
.collect::<Vec<_>>()
})
@@ -200,40 +200,21 @@ impl Editor {
if all_colors.is_empty() {
return;
}
- let Ok((multi_buffer_snapshot, editor_excerpts)) = editor.update(cx, |editor, cx| {
- let multi_buffer_snapshot = editor.buffer().read(cx).snapshot(cx);
- let editor_excerpts = multi_buffer_snapshot.excerpts().fold(
- HashMap::default(),
- |mut acc, (excerpt_id, buffer_snapshot, excerpt_range)| {
- let excerpt_data = acc
- .entry(buffer_snapshot.remote_id())
- .or_insert_with(Vec::new);
- let excerpt_point_range =
- excerpt_range.context.to_point_utf16(buffer_snapshot);
- excerpt_data.push((
- excerpt_id,
- buffer_snapshot.clone(),
- excerpt_point_range,
- ));
- acc
- },
- );
- (multi_buffer_snapshot, editor_excerpts)
- }) else {
+ let Some(multi_buffer_snapshot) = editor
+ .update(cx, |editor, cx| editor.buffer.read(cx).snapshot(cx))
+ .ok()
+ else {
return;
};
let mut new_editor_colors: HashMap<BufferId, Vec<(Range<Anchor>, DocumentColor)>> =
HashMap::default();
- for (buffer_id, colors) in all_colors {
- let Some(excerpts) = editor_excerpts.get(&buffer_id) else {
- continue;
- };
+ for (buffer_snapshot, colors) in all_colors {
match colors {
Ok(colors) => {
if colors.colors.is_empty() {
new_editor_colors
- .entry(buffer_id)
+ .entry(buffer_snapshot.remote_id())
.or_insert_with(Vec::new)
.clear();
} else {
@@ -241,41 +222,33 @@ impl Editor {
let color_start = point_from_lsp(color.lsp_range.start);
let color_end = point_from_lsp(color.lsp_range.end);
- for (excerpt_id, buffer_snapshot, excerpt_range) in excerpts {
- if !excerpt_range.contains(&color_start.0)
- || !excerpt_range.contains(&color_end.0)
- {
- continue;
- }
- let start = buffer_snapshot.anchor_before(
- buffer_snapshot.clip_point_utf16(color_start, Bias::Left),
- );
- let end = buffer_snapshot.anchor_after(
- buffer_snapshot.clip_point_utf16(color_end, Bias::Right),
- );
- let Some(range) = multi_buffer_snapshot
- .anchor_range_in_excerpt(*excerpt_id, start..end)
- else {
- continue;
- };
-
- let new_buffer_colors =
- new_editor_colors.entry(buffer_id).or_insert_with(Vec::new);
-
- let (Ok(i) | Err(i)) =
- new_buffer_colors.binary_search_by(|(probe, _)| {
- probe
- .start
- .cmp(&range.start, &multi_buffer_snapshot)
- .then_with(|| {
- probe
- .end
- .cmp(&range.end, &multi_buffer_snapshot)
- })
- });
- new_buffer_colors.insert(i, (range, color));
- break;
- }
+ let Some(range) = multi_buffer_snapshot
+ .buffer_anchor_range_to_anchor_range(
+ buffer_snapshot.anchor_range_outside(
+ buffer_snapshot
+ .clip_point_utf16(color_start, Bias::Left)
+ ..buffer_snapshot
+ .clip_point_utf16(color_end, Bias::Right),
+ ),
+ )
+ else {
+ continue;
+ };
+
+ let new_buffer_colors = new_editor_colors
+ .entry(buffer_snapshot.remote_id())
+ .or_insert_with(Vec::new);
+
+ let (Ok(i) | Err(i)) =
+ new_buffer_colors.binary_search_by(|(probe, _)| {
+ probe
+ .start
+ .cmp(&range.start, &multi_buffer_snapshot)
+ .then_with(|| {
+ probe.end.cmp(&range.end, &multi_buffer_snapshot)
+ })
+ });
+ new_buffer_colors.insert(i, (range, color));
}
}
}
@@ -62,10 +62,10 @@ impl Editor {
multi_buffer_snapshot: &MultiBufferSnapshot,
cx: &Context<Self>,
) -> bool {
- let Some(excerpt) = multi_buffer_snapshot.excerpt_containing(cursor..cursor) else {
+ let Some((anchor, _)) = multi_buffer_snapshot.anchor_to_buffer_anchor(cursor) else {
return false;
};
- let Some(buffer) = self.buffer.read(cx).buffer(excerpt.buffer_id()) else {
+ let Some(buffer) = self.buffer.read(cx).buffer(anchor.buffer_id) else {
return false;
};
lsp_symbols_enabled(buffer.read(cx), cx)
@@ -77,19 +77,12 @@ impl Editor {
&self,
cursor: Anchor,
multi_buffer_snapshot: &MultiBufferSnapshot,
- cx: &Context<Self>,
+ _cx: &Context<Self>,
) -> Option<(BufferId, Vec<OutlineItem<Anchor>>)> {
- let excerpt = multi_buffer_snapshot.excerpt_containing(cursor..cursor)?;
- let excerpt_id = excerpt.id();
- let buffer_id = excerpt.buffer_id();
- if Some(buffer_id) != cursor.text_anchor.buffer_id {
- return None;
- }
- let buffer = self.buffer.read(cx).buffer(buffer_id)?;
- let buffer_snapshot = buffer.read(cx).snapshot();
- let cursor_text_anchor = cursor.text_anchor;
-
- let all_items = self.lsp_document_symbols.get(&buffer_id)?;
+ let (cursor_text_anchor, buffer) = multi_buffer_snapshot.anchor_to_buffer_anchor(cursor)?;
+ let all_items = self
+ .lsp_document_symbols
+ .get(&cursor_text_anchor.buffer_id)?;
if all_items.is_empty() {
return None;
}
@@ -97,34 +90,36 @@ impl Editor {
let mut symbols = all_items
.iter()
.filter(|item| {
- item.range
- .start
- .cmp(&cursor_text_anchor, &buffer_snapshot)
- .is_le()
- && item
- .range
- .end
- .cmp(&cursor_text_anchor, &buffer_snapshot)
- .is_ge()
+ item.range.start.cmp(&cursor_text_anchor, buffer).is_le()
+ && item.range.end.cmp(&cursor_text_anchor, buffer).is_ge()
})
- .map(|item| OutlineItem {
- depth: item.depth,
- range: Anchor::range_in_buffer(excerpt_id, item.range.clone()),
- source_range_for_text: Anchor::range_in_buffer(
- excerpt_id,
- item.source_range_for_text.clone(),
- ),
- text: item.text.clone(),
- highlight_ranges: item.highlight_ranges.clone(),
- name_ranges: item.name_ranges.clone(),
- body_range: item
- .body_range
- .as_ref()
- .map(|r| Anchor::range_in_buffer(excerpt_id, r.clone())),
- annotation_range: item
- .annotation_range
- .as_ref()
- .map(|r| Anchor::range_in_buffer(excerpt_id, r.clone())),
+ .filter_map(|item| {
+ let range_start = multi_buffer_snapshot.anchor_in_buffer(item.range.start)?;
+ let range_end = multi_buffer_snapshot.anchor_in_buffer(item.range.end)?;
+ let source_range_for_text_start =
+ multi_buffer_snapshot.anchor_in_buffer(item.source_range_for_text.start)?;
+ let source_range_for_text_end =
+ multi_buffer_snapshot.anchor_in_buffer(item.source_range_for_text.end)?;
+ Some(OutlineItem {
+ depth: item.depth,
+ range: range_start..range_end,
+ source_range_for_text: source_range_for_text_start..source_range_for_text_end,
+ text: item.text.clone(),
+ highlight_ranges: item.highlight_ranges.clone(),
+ name_ranges: item.name_ranges.clone(),
+ body_range: item.body_range.as_ref().and_then(|r| {
+ Some(
+ multi_buffer_snapshot.anchor_in_buffer(r.start)?
+ ..multi_buffer_snapshot.anchor_in_buffer(r.end)?,
+ )
+ }),
+ annotation_range: item.annotation_range.as_ref().and_then(|r| {
+ Some(
+ multi_buffer_snapshot.anchor_in_buffer(r.start)?
+ ..multi_buffer_snapshot.anchor_in_buffer(r.end)?,
+ )
+ }),
+ })
})
.collect::<Vec<_>>();
@@ -135,7 +130,7 @@ impl Editor {
retain
});
- Some((buffer_id, symbols))
+ Some((buffer.remote_id(), symbols))
}
/// Fetches document symbols from the LSP for buffers that have the setting
@@ -155,9 +150,10 @@ impl Editor {
};
let buffers_to_query = self
- .visible_excerpts(true, cx)
+ .visible_buffers(cx)
.into_iter()
- .filter_map(|(_, (buffer, _, _))| {
+ .filter(|buffer| self.is_lsp_relevant(buffer.read(cx).file(), cx))
+ .filter_map(|buffer| {
let id = buffer.read(cx).remote_id();
if for_buffer.is_none_or(|target| target == id)
&& lsp_symbols_enabled(buffer.read(cx), cx)
@@ -7,7 +7,7 @@ use gpui::{
use indoc::indoc;
use language::EditPredictionsMode;
use language::{Buffer, CodeLabel};
-use multi_buffer::{Anchor, ExcerptId, MultiBufferSnapshot, ToPoint};
+use multi_buffer::{Anchor, MultiBufferSnapshot, ToPoint};
use project::{Completion, CompletionResponse, CompletionSource};
use std::{
ops::Range,
@@ -1081,6 +1081,44 @@ async fn test_cancel_clears_stale_edit_prediction_in_menu(cx: &mut gpui::TestApp
});
}
+#[gpui::test]
+async fn test_discard_clears_delegate_completion(cx: &mut gpui::TestAppContext) {
+ init_test(cx, |_| {});
+ load_default_keymap(cx);
+
+ let mut cx = EditorTestContext::new(cx).await;
+ let provider = cx.new(|_| FakeEditPredictionDelegate::default());
+ assign_editor_completion_provider(provider.clone(), &mut cx);
+ cx.set_state("let x = ˇ;");
+
+ propose_edits(&provider, vec![(8..8, "42")], &mut cx);
+ cx.update_editor(|editor, window, cx| editor.update_visible_edit_prediction(window, cx));
+
+ cx.update_editor(|editor, _window, _cx| {
+ assert!(editor.active_edit_prediction.is_some());
+ });
+
+ // Dismiss the prediction — this must call discard() on the delegate,
+ // which should clear self.completion.
+ cx.simulate_keystroke("escape");
+ cx.run_until_parked();
+
+ cx.update_editor(|editor, _window, _cx| {
+ assert!(editor.active_edit_prediction.is_none());
+ });
+
+ // update_visible_edit_prediction must NOT bring the prediction back,
+ // because discard() cleared self.completion in the delegate.
+ cx.update_editor(|editor, window, cx| editor.update_visible_edit_prediction(window, cx));
+
+ cx.update_editor(|editor, _window, _cx| {
+ assert!(
+ editor.active_edit_prediction.is_none(),
+ "prediction must not resurface after discard()"
+ );
+ });
+}
+
fn accept_completion(cx: &mut EditorTestContext) {
cx.update_editor(|editor, window, cx| {
editor.accept_edit_prediction(&crate::AcceptEditPrediction, window, cx)
@@ -1242,15 +1280,14 @@ struct FakeCompletionMenuProvider;
impl CompletionProvider for FakeCompletionMenuProvider {
fn completions(
&self,
- _excerpt_id: ExcerptId,
- _buffer: &Entity<Buffer>,
+ buffer: &Entity<Buffer>,
_buffer_position: text::Anchor,
_trigger: CompletionContext,
_window: &mut Window,
- _cx: &mut Context<crate::Editor>,
+ cx: &mut Context<crate::Editor>,
) -> Task<anyhow::Result<Vec<CompletionResponse>>> {
let completion = Completion {
- replace_range: text::Anchor::MIN..text::Anchor::MAX,
+ replace_range: text::Anchor::min_max_range_for_buffer(buffer.read(cx).remote_id()),
new_text: "fake_completion".to_string(),
label: CodeLabel::plain("fake_completion".to_string(), None),
documentation: None,
@@ -1351,6 +1388,7 @@ impl EditPredictionDelegate for FakeEditPredictionDelegate {
_reason: edit_prediction_types::EditPredictionDiscardReason,
_cx: &mut gpui::Context<Self>,
) {
+ self.completion.take();
}
fn suggest<'a>(
@@ -1427,6 +1465,7 @@ impl EditPredictionDelegate for FakeNonZedEditPredictionDelegate {
_reason: edit_prediction_types::EditPredictionDiscardReason,
_cx: &mut gpui::Context<Self>,
) {
+ self.completion.take();
}
fn suggest<'a>(
@@ -76,7 +76,7 @@ pub use linked_editing_ranges::LinkedEdits;
pub use lsp::CompletionContext;
pub use lsp_ext::lsp_tasks;
pub use multi_buffer::{
- Anchor, AnchorRangeExt, BufferOffset, ExcerptId, ExcerptRange, MBTextSummary, MultiBuffer,
+ Anchor, AnchorRangeExt, BufferOffset, ExcerptRange, MBTextSummary, MultiBuffer,
MultiBufferOffset, MultiBufferOffsetUtf16, MultiBufferSnapshot, PathKey, RowInfo, ToOffset,
ToPoint,
};
@@ -150,7 +150,8 @@ use markdown::Markdown;
use mouse_context_menu::MouseContextMenu;
use movement::TextLayoutDetails;
use multi_buffer::{
- ExcerptInfo, ExpandExcerptDirection, MultiBufferDiffHunk, MultiBufferPoint, MultiBufferRow,
+ ExcerptBoundaryInfo, ExpandExcerptDirection, MultiBufferDiffHunk, MultiBufferPoint,
+ MultiBufferRow,
};
use parking_lot::Mutex;
use persistence::EditorDb;
@@ -640,6 +641,7 @@ pub(crate) enum EditDisplayMode {
enum EditPrediction {
Edit {
+ // TODO could be a language::Anchor?
edits: Vec<(Range<Anchor>, Arc<str>)>,
/// Predicted cursor position as (anchor, offset_from_anchor).
/// The anchor is in multibuffer coordinates; after applying edits,
@@ -887,7 +889,8 @@ pub trait Addon: 'static {
fn render_buffer_header_controls(
&self,
- _: &ExcerptInfo,
+ _: &ExcerptBoundaryInfo,
+ _: &language::BufferSnapshot,
_: &Window,
_: &App,
) -> Option<AnyElement> {
@@ -1340,7 +1343,7 @@ pub struct Editor {
suppress_selection_callback: bool,
applicable_language_settings: HashMap<Option<LanguageName>, LanguageSettings>,
accent_data: Option<AccentData>,
- bracket_fetched_tree_sitter_chunks: HashMap<ExcerptId, HashSet<Range<BufferRow>>>,
+ bracket_fetched_tree_sitter_chunks: HashMap<Range<text::Anchor>, HashSet<Range<BufferRow>>>,
semantic_token_state: SemanticTokenState,
pub(crate) refresh_matching_bracket_highlights_task: Task<()>,
refresh_document_symbols_task: Shared<Task<()>>,
@@ -1763,15 +1766,13 @@ impl ClipboardSelection {
project.absolute_path(&project_path, cx)
});
- let line_range = file_path.as_ref().and_then(|_| {
- let (_, start_point, start_excerpt_id) = buffer.point_to_buffer_point(range.start)?;
- let (_, end_point, end_excerpt_id) = buffer.point_to_buffer_point(range.end)?;
- if start_excerpt_id == end_excerpt_id {
- Some(start_point.row..=end_point.row)
- } else {
- None
- }
- });
+ let line_range = if file_path.is_some() {
+ buffer
+ .range_to_buffer_range(range)
+ .map(|(_, buffer_range)| buffer_range.start.row..=buffer_range.end.row)
+ } else {
+ None
+ };
Self {
len,
@@ -1852,9 +1853,8 @@ pub enum JumpData {
line_offset_from_top: u32,
},
MultiBufferPoint {
- excerpt_id: ExcerptId,
+ anchor: language::Anchor,
position: Point,
- anchor: text::Anchor,
line_offset_from_top: u32,
},
}
@@ -1990,17 +1990,21 @@ impl Editor {
if !self.mode.is_full() {
return;
}
- let multi_buffer = display_snapshot.buffer_snapshot();
+ let multi_buffer = display_snapshot.buffer_snapshot().clone();
let scroll_anchor = self
.scroll_manager
.native_anchor(display_snapshot, cx)
.anchor;
- let Some((excerpt_id, _, buffer)) = multi_buffer.as_singleton() else {
+ let Some(buffer_snapshot) = multi_buffer.as_singleton() else {
return;
};
- let buffer = buffer.clone();
- let buffer_visible_start = scroll_anchor.text_anchor.to_point(&buffer);
+ let buffer = buffer_snapshot.clone();
+ let Some((buffer_visible_start, _)) = multi_buffer.anchor_to_buffer_anchor(scroll_anchor)
+ else {
+ return;
+ };
+ let buffer_visible_start = buffer_visible_start.to_point(&buffer);
let max_row = buffer.max_point().row;
let start_row = buffer_visible_start.row.min(max_row);
let end_row = (buffer_visible_start.row + 10).min(max_row);
@@ -2014,22 +2018,24 @@ impl Editor {
Some(syntax.as_ref()),
)
.into_iter()
- .map(|outline_item| OutlineItem {
- depth: outline_item.depth,
- range: Anchor::range_in_buffer(excerpt_id, outline_item.range),
- source_range_for_text: Anchor::range_in_buffer(
- excerpt_id,
- outline_item.source_range_for_text,
- ),
- text: outline_item.text,
- highlight_ranges: outline_item.highlight_ranges,
- name_ranges: outline_item.name_ranges,
- body_range: outline_item
- .body_range
- .map(|range| Anchor::range_in_buffer(excerpt_id, range)),
- annotation_range: outline_item
- .annotation_range
- .map(|range| Anchor::range_in_buffer(excerpt_id, range)),
+ .filter_map(|outline_item| {
+ Some(OutlineItem {
+ depth: outline_item.depth,
+ range: multi_buffer
+ .buffer_anchor_range_to_anchor_range(outline_item.range)?,
+ source_range_for_text: multi_buffer.buffer_anchor_range_to_anchor_range(
+ outline_item.source_range_for_text,
+ )?,
+ text: outline_item.text,
+ highlight_ranges: outline_item.highlight_ranges,
+ name_ranges: outline_item.name_ranges,
+ body_range: outline_item.body_range.and_then(|range| {
+ multi_buffer.buffer_anchor_range_to_anchor_range(range)
+ }),
+ annotation_range: outline_item.annotation_range.and_then(|range| {
+ multi_buffer.buffer_anchor_range_to_anchor_range(range)
+ }),
+ })
})
.collect()
});
@@ -3024,7 +3030,10 @@ impl Editor {
fn edit_prediction_cursor_popover_prefers_preview(
&self,
completion: &EditPredictionState,
+ cx: &App,
) -> bool {
+ let multibuffer_snapshot = self.buffer.read(cx).snapshot(cx);
+
match &completion.completion {
EditPrediction::Edit {
edits, snapshot, ..
@@ -3033,8 +3042,13 @@ impl Editor {
let mut end_row: Option<u32> = None;
for (range, text) in edits {
- let edit_start_row = range.start.text_anchor.to_point(snapshot).row;
- let old_end_row = range.end.text_anchor.to_point(snapshot).row;
+ let Some((_, range)) =
+ multibuffer_snapshot.anchor_range_to_buffer_anchor_range(range.clone())
+ else {
+ continue;
+ };
+ let edit_start_row = range.start.to_point(snapshot).row;
+ let old_end_row = range.end.to_point(snapshot).row;
let inserted_newline_count = text
.as_ref()
.chars()
@@ -3083,7 +3097,7 @@ impl Editor {
.active_edit_prediction
.as_ref()
.filter(|completion| {
- self.edit_prediction_cursor_popover_prefers_preview(completion)
+ self.edit_prediction_cursor_popover_prefers_preview(completion, cx)
})
.map_or(EditPredictionKeybindAction::Accept, |_| {
EditPredictionKeybindAction::Preview
@@ -3320,13 +3334,12 @@ impl Editor {
self.buffer.read(cx).read(cx).file_at(point).cloned()
}
- pub fn active_excerpt(
- &self,
- cx: &App,
- ) -> Option<(ExcerptId, Entity<Buffer>, Range<text::Anchor>)> {
- self.buffer
- .read(cx)
- .excerpt_containing(self.selections.newest_anchor().head(), cx)
+ pub fn active_buffer(&self, cx: &App) -> Option<Entity<Buffer>> {
+ let multibuffer = self.buffer.read(cx);
+ let snapshot = multibuffer.snapshot(cx);
+ let (anchor, _) =
+ snapshot.anchor_to_buffer_anchor(self.selections.newest_anchor().head())?;
+ multibuffer.buffer(anchor.buffer_id)
}
pub fn mode(&self) -> &EditorMode {
@@ -3695,8 +3708,8 @@ impl Editor {
}
if local {
- if let Some(buffer_id) = new_cursor_position.text_anchor.buffer_id {
- self.register_buffer(buffer_id, cx);
+ if let Some((anchor, _)) = buffer.anchor_to_buffer_anchor(new_cursor_position) {
+ self.register_buffer(anchor.buffer_id, cx);
}
let mut context_menu = self.context_menu.borrow_mut();
@@ -3778,12 +3791,13 @@ impl Editor {
if selections.len() == 1 {
cx.emit(SearchEvent::ActiveMatchChanged)
}
- if local && let Some((_, _, buffer_snapshot)) = buffer.as_singleton() {
+ if local && let Some(buffer_snapshot) = buffer.as_singleton() {
let inmemory_selections = selections
.iter()
.map(|s| {
- text::ToPoint::to_point(&s.range().start.text_anchor, buffer_snapshot)
- ..text::ToPoint::to_point(&s.range().end.text_anchor, buffer_snapshot)
+ let start = s.range().start.text_anchor_in(buffer_snapshot);
+ let end = s.range().end.text_anchor_in(buffer_snapshot);
+ (start..end).to_point(buffer_snapshot)
})
.collect();
self.update_restoration_data(cx, |data| {
@@ -3829,7 +3843,6 @@ impl Editor {
fn folds_did_change(&mut self, cx: &mut Context<Self>) {
use text::ToOffset as _;
- use text::ToPoint as _;
if self.mode.is_minimap()
|| WorkspaceSettings::get(None, cx).restore_on_startup
@@ -3838,21 +3851,18 @@ impl Editor {
return;
}
- if !self.buffer().read(cx).is_singleton() {
- return;
- }
-
let display_snapshot = self
.display_map
.update(cx, |display_map, cx| display_map.snapshot(cx));
- let Some((.., snapshot)) = display_snapshot.buffer_snapshot().as_singleton() else {
+ let Some(buffer_snapshot) = display_snapshot.buffer_snapshot().as_singleton() else {
return;
};
let inmemory_folds = display_snapshot
.folds_in_range(MultiBufferOffset(0)..display_snapshot.buffer_snapshot().len())
.map(|fold| {
- fold.range.start.text_anchor.to_point(&snapshot)
- ..fold.range.end.text_anchor.to_point(&snapshot)
+ let start = fold.range.start.text_anchor_in(buffer_snapshot);
+ let end = fold.range.end.text_anchor_in(buffer_snapshot);
+ (start..end).to_point(buffer_snapshot)
})
.collect();
self.update_restoration_data(cx, |data| {
@@ -3876,8 +3886,16 @@ impl Editor {
let db_folds = display_snapshot
.folds_in_range(MultiBufferOffset(0)..display_snapshot.buffer_snapshot().len())
.map(|fold| {
- let start = fold.range.start.text_anchor.to_offset(&snapshot);
- let end = fold.range.end.text_anchor.to_offset(&snapshot);
+ let start = fold
+ .range
+ .start
+ .text_anchor_in(buffer_snapshot)
+ .to_offset(buffer_snapshot);
+ let end = fold
+ .range
+ .end
+ .text_anchor_in(buffer_snapshot)
+ .to_offset(buffer_snapshot);
// Extract fingerprints - content at fold boundaries for validation on restore
// Both fingerprints must be INSIDE the fold to avoid capturing surrounding
@@ -3886,12 +3904,14 @@ impl Editor {
// end_fp: last min(32, fold_len) bytes of fold content
// Clip to character boundaries to handle multibyte UTF-8 characters.
let fold_len = end - start;
- let start_fp_end = snapshot
+ let start_fp_end = buffer_snapshot
.clip_offset(start + std::cmp::min(FINGERPRINT_LEN, fold_len), Bias::Left);
- let start_fp: String = snapshot.text_for_range(start..start_fp_end).collect();
- let end_fp_start = snapshot
+ let start_fp: String = buffer_snapshot
+ .text_for_range(start..start_fp_end)
+ .collect();
+ let end_fp_start = buffer_snapshot
.clip_offset(end.saturating_sub(FINGERPRINT_LEN).max(start), Bias::Right);
- let end_fp: String = snapshot.text_for_range(end_fp_start..end).collect();
+ let end_fp: String = buffer_snapshot.text_for_range(end_fp_start..end).collect();
(start, end, start_fp, end_fp)
})
@@ -4654,30 +4674,31 @@ impl Editor {
fn linked_editing_ranges_for(
&self,
- selection: Range<text::Anchor>,
+ query_range: Range<text::Anchor>,
cx: &App,
) -> Option<HashMap<Entity<Buffer>, Vec<Range<text::Anchor>>>> {
+ use text::ToOffset as TO;
+
if self.linked_edit_ranges.is_empty() {
return None;
}
- let ((base_range, linked_ranges), buffer_snapshot, buffer) =
- selection.end.buffer_id.and_then(|end_buffer_id| {
- if selection.start.buffer_id != Some(end_buffer_id) {
- return None;
- }
- let buffer = self.buffer.read(cx).buffer(end_buffer_id)?;
- let snapshot = buffer.read(cx).snapshot();
- self.linked_edit_ranges
- .get(end_buffer_id, selection.start..selection.end, &snapshot)
- .map(|ranges| (ranges, snapshot, buffer))
- })?;
- use text::ToOffset as TO;
+ if query_range.start.buffer_id != query_range.end.buffer_id {
+ return None;
+ };
+ let multibuffer_snapshot = self.buffer.read(cx).snapshot(cx);
+ let buffer = self.buffer.read(cx).buffer(query_range.end.buffer_id)?;
+ let buffer_snapshot = buffer.read(cx).snapshot();
+ let (base_range, linked_ranges) = self.linked_edit_ranges.get(
+ buffer_snapshot.remote_id(),
+ query_range.clone(),
+ &buffer_snapshot,
+ )?;
// find offset from the start of current range to current cursor position
let start_byte_offset = TO::to_offset(&base_range.start, &buffer_snapshot);
- let start_offset = TO::to_offset(&selection.start, &buffer_snapshot);
+ let start_offset = TO::to_offset(&query_range.start, &buffer_snapshot);
let start_difference = start_offset - start_byte_offset;
- let end_offset = TO::to_offset(&selection.end, &buffer_snapshot);
+ let end_offset = TO::to_offset(&query_range.end, &buffer_snapshot);
let end_difference = end_offset - start_byte_offset;
// Current range has associated linked ranges.
@@ -4690,13 +4711,22 @@ impl Editor {
continue;
}
if self.selections.disjoint_anchor_ranges().any(|s| {
- if s.start.text_anchor.buffer_id != selection.start.buffer_id
- || s.end.text_anchor.buffer_id != selection.end.buffer_id
+ let Some((selection_start, _)) =
+ multibuffer_snapshot.anchor_to_buffer_anchor(s.start)
+ else {
+ return false;
+ };
+ let Some((selection_end, _)) = multibuffer_snapshot.anchor_to_buffer_anchor(s.end)
+ else {
+ return false;
+ };
+ if selection_start.buffer_id != query_range.start.buffer_id
+ || selection_end.buffer_id != query_range.end.buffer_id
{
return false;
}
- TO::to_offset(&s.start.text_anchor, &buffer_snapshot) <= end_offset
- && TO::to_offset(&s.end.text_anchor, &buffer_snapshot) >= start_offset
+ TO::to_offset(&selection_start, &buffer_snapshot) <= end_offset
+ && TO::to_offset(&selection_end, &buffer_snapshot) >= start_offset
}) {
continue;
}
@@ -5015,21 +5045,26 @@ impl Editor {
if !self.linked_edit_ranges.is_empty() {
let start_anchor = snapshot.anchor_before(selection.start);
+ let classifier = snapshot
+ .char_classifier_at(start_anchor)
+ .scope_context(Some(CharScopeContext::LinkedEdit));
- let is_word_char = text.chars().next().is_none_or(|char| {
- let classifier = snapshot
- .char_classifier_at(start_anchor.to_offset(&snapshot))
- .scope_context(Some(CharScopeContext::LinkedEdit));
- classifier.is_word(char)
- });
- let is_dot = text.as_ref() == ".";
- let should_apply_linked_edit = is_word_char || is_dot;
+ if let Some((_, anchor_range)) =
+ snapshot.anchor_range_to_buffer_anchor_range(start_anchor..anchor)
+ {
+ let is_word_char = text
+ .chars()
+ .next()
+ .is_none_or(|char| classifier.is_word(char));
- if should_apply_linked_edit {
- let anchor_range = start_anchor.text_anchor..anchor.text_anchor;
- linked_edits.push(&self, anchor_range, text.clone(), cx);
- } else {
- clear_linked_edit_ranges = true;
+ let is_dot = text.as_ref() == ".";
+ let should_apply_linked_edit = is_word_char || is_dot;
+
+ if should_apply_linked_edit {
+ linked_edits.push(&self, anchor_range, text.clone(), cx);
+ } else {
+ clear_linked_edit_ranges = true;
+ }
}
}
@@ -5522,7 +5557,7 @@ impl Editor {
let row = cursor.row;
let point = Point::new(row, 0);
- let Some((buffer_handle, buffer_point, _)) =
+ let Some((buffer_handle, buffer_point)) =
self.buffer.read(cx).point_to_buffer_point(point, cx)
else {
continue;
@@ -5662,12 +5697,16 @@ impl Editor {
/// Collects linked edits for the current selections, pairing each linked
/// range with `text`.
pub fn linked_edits_for_selections(&self, text: Arc<str>, cx: &App) -> LinkedEdits {
+ let multibuffer_snapshot = self.buffer().read(cx).snapshot(cx);
let mut linked_edits = LinkedEdits::new();
if !self.linked_edit_ranges.is_empty() {
for selection in self.selections.disjoint_anchors() {
- let start = selection.start.text_anchor;
- let end = selection.end.text_anchor;
- linked_edits.push(self, start..end, text.clone(), cx);
+ let Some((_, range)) =
+ multibuffer_snapshot.anchor_range_to_buffer_anchor_range(selection.range())
+ else {
+ continue;
+ };
+ linked_edits.push(self, range, text.clone(), cx);
}
}
linked_edits
@@ -5898,53 +5937,54 @@ impl Editor {
}
}
- pub fn visible_excerpts(
- &self,
- lsp_related_only: bool,
- cx: &mut Context<Editor>,
- ) -> HashMap<ExcerptId, (Entity<Buffer>, clock::Global, Range<usize>)> {
- let project = self.project().cloned();
- let display_snapshot = self.display_map.update(cx, |map, cx| map.snapshot(cx));
+ pub fn is_lsp_relevant(&self, file: Option<&Arc<dyn language::File>>, cx: &App) -> bool {
+ let Some(project) = self.project() else {
+ return false;
+ };
+ let Some(buffer_file) = project::File::from_dyn(file) else {
+ return false;
+ };
+ let Some(entry_id) = buffer_file.project_entry_id() else {
+ return false;
+ };
+ let project = project.read(cx);
+ let Some(buffer_worktree) = project.worktree_for_id(buffer_file.worktree_id(cx), cx) else {
+ return false;
+ };
+ let Some(worktree_entry) = buffer_worktree.read(cx).entry_for_id(entry_id) else {
+ return false;
+ };
+ !worktree_entry.is_ignored
+ }
+
+ pub fn visible_buffers(&self, cx: &mut Context<Editor>) -> Vec<Entity<Buffer>> {
+ let display_snapshot = self.display_snapshot(cx);
+ let visible_range = self.multi_buffer_visible_range(&display_snapshot, cx);
let multi_buffer = self.buffer().read(cx);
- let multi_buffer_snapshot = multi_buffer.snapshot(cx);
- multi_buffer_snapshot
- .range_to_buffer_ranges(
- self.multi_buffer_visible_range(&display_snapshot, cx)
- .to_inclusive(),
- )
+ display_snapshot
+ .buffer_snapshot()
+ .range_to_buffer_ranges(visible_range)
.into_iter()
.filter(|(_, excerpt_visible_range, _)| !excerpt_visible_range.is_empty())
- .filter_map(|(buffer, excerpt_visible_range, excerpt_id)| {
- if !lsp_related_only {
- return Some((
- excerpt_id,
- (
- multi_buffer.buffer(buffer.remote_id()).unwrap(),
- buffer.version().clone(),
- excerpt_visible_range.start.0..excerpt_visible_range.end.0,
- ),
- ));
- }
+ .filter_map(|(buffer_snapshot, _, _)| multi_buffer.buffer(buffer_snapshot.remote_id()))
+ .collect()
+ }
- let project = project.as_ref()?.read(cx);
- let buffer_file = project::File::from_dyn(buffer.file())?;
- let buffer_worktree = project.worktree_for_id(buffer_file.worktree_id(cx), cx)?;
- let worktree_entry = buffer_worktree
- .read(cx)
- .entry_for_id(buffer_file.project_entry_id()?)?;
- if worktree_entry.is_ignored {
- None
- } else {
- Some((
- excerpt_id,
- (
- multi_buffer.buffer(buffer.remote_id()).unwrap(),
- buffer.version().clone(),
- excerpt_visible_range.start.0..excerpt_visible_range.end.0,
- ),
- ))
- }
- })
+ pub fn visible_buffer_ranges(
+ &self,
+ cx: &mut Context<Editor>,
+ ) -> Vec<(
+ BufferSnapshot,
+ Range<BufferOffset>,
+ ExcerptRange<text::Anchor>,
+ )> {
+ let display_snapshot = self.display_snapshot(cx);
+ let visible_range = self.multi_buffer_visible_range(&display_snapshot, cx);
+ display_snapshot
+ .buffer_snapshot()
+ .range_to_buffer_ranges(visible_range)
+ .into_iter()
+ .filter(|(_, excerpt_visible_range, _)| !excerpt_visible_range.is_empty())
.collect()
}
@@ -6069,17 +6109,19 @@ impl Editor {
.newest_anchor()
.start
.bias_right(&multibuffer_snapshot);
- if position.diff_base_anchor.is_some() {
+
+ if position.diff_base_anchor().is_some() {
return;
}
- let buffer_position = multibuffer_snapshot.anchor_before(position);
- let Some(buffer) = buffer_position
- .text_anchor
- .buffer_id
- .and_then(|buffer_id| self.buffer.read(cx).buffer(buffer_id))
+ let multibuffer_position = multibuffer_snapshot.anchor_before(position);
+ let Some((buffer_position, _)) =
+ multibuffer_snapshot.anchor_to_buffer_anchor(multibuffer_position)
else {
return;
};
+ let Some(buffer) = self.buffer.read(cx).buffer(buffer_position.buffer_id) else {
+ return;
+ };
let buffer_snapshot = buffer.read(cx).snapshot();
let menu_is_open = matches!(
@@ -6088,9 +6130,9 @@ impl Editor {
);
let language = buffer_snapshot
- .language_at(buffer_position.text_anchor)
+ .language_at(buffer_position)
.map(|language| language.name());
- let language_settings = multibuffer_snapshot.language_settings_at(buffer_position, cx);
+ let language_settings = multibuffer_snapshot.language_settings_at(multibuffer_position, cx);
let completion_settings = language_settings.completions.clone();
let show_completions_on_input = self
@@ -6101,7 +6143,7 @@ impl Editor {
}
let query: Option<Arc<String>> =
- Self::completion_query(&multibuffer_snapshot, buffer_position)
+ Self::completion_query(&multibuffer_snapshot, multibuffer_position)
.map(|query| query.into());
drop(multibuffer_snapshot);
@@ -6143,7 +6185,7 @@ impl Editor {
if filter_completions {
menu.filter(
query.clone().unwrap_or_default(),
- buffer_position.text_anchor,
+ buffer_position,
&buffer,
provider.clone(),
window,
@@ -6177,12 +6219,6 @@ impl Editor {
}
};
- let Anchor {
- excerpt_id: buffer_excerpt_id,
- text_anchor: buffer_position,
- ..
- } = buffer_position;
-
let (word_replace_range, word_to_exclude) = if let (word_range, Some(CharKind::Word)) =
buffer_snapshot.surrounding_word(buffer_position, None)
{
@@ -6225,7 +6261,7 @@ impl Editor {
trigger.as_ref().is_none_or(|trigger| {
provider.is_completion_trigger(
&buffer,
- position.text_anchor,
+ buffer_position,
trigger,
trigger_in_words,
cx,
@@ -6246,14 +6282,7 @@ impl Editor {
trigger_character,
};
- provider.completions(
- buffer_excerpt_id,
- &buffer,
- buffer_position,
- completion_context,
- window,
- cx,
- )
+ provider.completions(&buffer, buffer_position, completion_context, window, cx)
} else {
Task::ready(Ok(Vec::new()))
};
@@ -6593,42 +6622,42 @@ impl Editor {
cx.stop_propagation();
let buffer_handle = completions_menu.buffer.clone();
+ let multibuffer_snapshot = self.buffer.read(cx).snapshot(cx);
+ let (initial_position, _) =
+ multibuffer_snapshot.anchor_to_buffer_anchor(completions_menu.initial_position)?;
let CompletionEdit {
new_text,
snippet,
replace_range,
- } = process_completion_for_edit(
- &completion,
- intent,
- &buffer_handle,
- &completions_menu.initial_position.text_anchor,
- cx,
- );
+ } = process_completion_for_edit(&completion, intent, &buffer_handle, &initial_position, cx);
- let buffer = buffer_handle.read(cx);
- let snapshot = self.buffer.read(cx).snapshot(cx);
- let newest_anchor = self.selections.newest_anchor();
- let replace_range_multibuffer = {
- let mut excerpt = snapshot.excerpt_containing(newest_anchor.range()).unwrap();
- excerpt.map_range_from_buffer(replace_range.clone())
+ let buffer = buffer_handle.read(cx).snapshot();
+ let newest_selection = self.selections.newest_anchor();
+
+ let Some(replace_range_multibuffer) =
+ multibuffer_snapshot.buffer_anchor_range_to_anchor_range(replace_range.clone())
+ else {
+ return None;
};
- if snapshot.buffer_id_for_anchor(newest_anchor.head()) != Some(buffer.remote_id()) {
+
+ let Some((buffer_snapshot, newest_range_buffer)) =
+ multibuffer_snapshot.anchor_range_to_buffer_anchor_range(newest_selection.range())
+ else {
return None;
- }
+ };
let old_text = buffer
.text_for_range(replace_range.clone())
.collect::<String>();
- let lookbehind = newest_anchor
+ let lookbehind = newest_range_buffer
.start
- .text_anchor
- .to_offset(buffer)
- .saturating_sub(replace_range.start.0);
+ .to_offset(buffer_snapshot)
+ .saturating_sub(replace_range.start.to_offset(&buffer_snapshot));
let lookahead = replace_range
.end
- .0
- .saturating_sub(newest_anchor.end.text_anchor.to_offset(buffer));
+ .to_offset(&buffer_snapshot)
+ .saturating_sub(newest_range_buffer.end.to_offset(&buffer));
let prefix = &old_text[..old_text.len().saturating_sub(lookahead)];
let suffix = &old_text[lookbehind.min(old_text.len())..];
@@ -6641,34 +6670,40 @@ impl Editor {
let text: Arc<str> = new_text.clone().into();
for selection in &selections {
- let range = if selection.id == newest_anchor.id {
+ let range = if selection.id == newest_selection.id {
replace_range_multibuffer.clone()
} else {
let mut range = selection.range();
// if prefix is present, don't duplicate it
- if snapshot.contains_str_at(range.start.saturating_sub_usize(lookbehind), prefix) {
+ if multibuffer_snapshot
+ .contains_str_at(range.start.saturating_sub_usize(lookbehind), prefix)
+ {
range.start = range.start.saturating_sub_usize(lookbehind);
// if suffix is also present, mimic the newest cursor and replace it
- if selection.id != newest_anchor.id
- && snapshot.contains_str_at(range.end, suffix)
+ if selection.id != newest_selection.id
+ && multibuffer_snapshot.contains_str_at(range.end, suffix)
{
range.end += lookahead;
}
}
- range
+ range.to_anchors(&multibuffer_snapshot)
};
ranges.push(range.clone());
- let start_anchor = snapshot.anchor_before(range.start);
- let end_anchor = snapshot.anchor_after(range.end);
- let anchor_range = start_anchor.text_anchor..end_anchor.text_anchor;
- all_commit_ranges.push(anchor_range.clone());
+ let start_anchor = multibuffer_snapshot.anchor_before(range.start);
+ let end_anchor = multibuffer_snapshot.anchor_after(range.end);
- if !self.linked_edit_ranges.is_empty() {
- linked_edits.push(&self, anchor_range, text.clone(), cx);
+ if let Some((buffer_snapshot_2, anchor_range)) =
+ multibuffer_snapshot.anchor_range_to_buffer_anchor_range(start_anchor..end_anchor)
+ && buffer_snapshot_2.remote_id() == buffer_snapshot.remote_id()
+ {
+ all_commit_ranges.push(anchor_range.clone());
+ if !self.linked_edit_ranges.is_empty() {
+ linked_edits.push(&self, anchor_range, text.clone(), cx);
+ }
}
}
@@ -6687,8 +6722,12 @@ impl Editor {
let tx_id = self.transact(window, cx, |editor, window, cx| {
if let Some(mut snippet) = snippet {
snippet.text = new_text.to_string();
+ let offset_ranges = ranges
+ .iter()
+ .map(|range| range.to_offset(&multibuffer_snapshot))
+ .collect::<Vec<_>>();
editor
- .insert_snippet(&ranges, snippet, window, cx)
+ .insert_snippet(&offset_ranges, snippet, window, cx)
.log_err();
} else {
editor.buffer.update(cx, |multi_buffer, cx| {
@@ -6703,7 +6742,10 @@ impl Editor {
linked_edits.apply(cx);
editor.refresh_edit_prediction(true, false, window, cx);
});
- self.invalidate_autoclose_regions(&self.selections.disjoint_anchors_arc(), &snapshot);
+ self.invalidate_autoclose_regions(
+ &self.selections.disjoint_anchors_arc(),
+ &multibuffer_snapshot,
+ );
let show_new_completions_on_confirm = completion
.confirm
@@ -6739,7 +6781,7 @@ impl Editor {
if available_commands.contains(&lsp_command.command) {
Some(CodeAction {
server_id: *server_id,
- range: language::Anchor::MIN..language::Anchor::MIN,
+ range: language::Anchor::min_min_range_for_buffer(buffer.remote_id()),
lsp_action: LspAction::Command(lsp_command.clone()),
resolved: false,
})
@@ -7069,13 +7111,9 @@ impl Editor {
Some(Task::ready(Ok(())))
})
}
- CodeActionsItem::CodeAction {
- excerpt_id,
- action,
- provider,
- } => {
+ CodeActionsItem::CodeAction { action, provider } => {
let apply_code_action =
- provider.apply_code_action(buffer, action, excerpt_id, true, window, cx);
+ provider.apply_code_action(buffer, action, true, window, cx);
let workspace = workspace.downgrade();
Some(cx.spawn_in(window, async move |editor, cx| {
let project_transaction = apply_code_action.await?;
@@ -7175,17 +7213,19 @@ impl Editor {
// avoid opening a new editor to display them.
if let [(buffer, transaction)] = &*entries {
- let excerpt = editor.update(cx, |editor, cx| {
- editor
- .buffer()
- .read(cx)
- .excerpt_containing(editor.selections.newest_anchor().head(), cx)
+ let cursor_excerpt = editor.update(cx, |editor, cx| {
+ let snapshot = editor.buffer().read(cx).snapshot(cx);
+ let head = editor.selections.newest_anchor().head();
+ let (buffer_snapshot, excerpt_range) = snapshot.excerpt_containing(head..head)?;
+ if buffer_snapshot.remote_id() != buffer.read(cx).remote_id() {
+ return None;
+ }
+ Some(excerpt_range)
})?;
- if let Some((_, excerpted_buffer, excerpt_range)) = excerpt
- && excerpted_buffer == *buffer
- {
+
+ if let Some(excerpt_range) = cursor_excerpt {
let all_edits_within_excerpt = buffer.read_with(cx, |buffer, _| {
- let excerpt_range = excerpt_range.to_offset(buffer);
+ let excerpt_range = excerpt_range.context.to_offset(buffer);
buffer
.edited_ranges_for_transaction::<usize>(transaction)
.all(|range| {
@@ -7207,15 +7247,21 @@ impl Editor {
.read(cx)
.edited_ranges_for_transaction::<Point>(transaction)
.collect::<Vec<_>>();
- let (ranges, _) = multibuffer.set_excerpts_for_path(
+ multibuffer.set_excerpts_for_path(
PathKey::for_buffer(buffer_handle, cx),
buffer_handle.clone(),
- edited_ranges,
+ edited_ranges.clone(),
multibuffer_context_lines(cx),
cx,
);
-
- ranges_to_highlight.extend(ranges);
+ let snapshot = multibuffer.snapshot(cx);
+ let buffer_snapshot = buffer_handle.read(cx).snapshot();
+ ranges_to_highlight.extend(edited_ranges.into_iter().filter_map(|range| {
+ let text_range = buffer_snapshot.anchor_range_inside(range);
+ let start = snapshot.anchor_in_buffer(text_range.start)?;
+ let end = snapshot.anchor_in_buffer(text_range.end)?;
+ Some(start..end)
+ }));
}
multibuffer.push_transaction(entries.iter().map(|(b, t)| (b, t)), cx);
multibuffer
@@ -7339,10 +7385,10 @@ impl Editor {
.timer(CODE_ACTIONS_DEBOUNCE_TIMEOUT)
.await;
- let (start_buffer, start, _, end, newest_selection) = this
+ let (start_buffer, start, _, end, _newest_selection) = this
.update(cx, |this, cx| {
let newest_selection = this.selections.newest_anchor().clone();
- if newest_selection.head().diff_base_anchor.is_some() {
+ if newest_selection.head().diff_base_anchor().is_some() {
return None;
}
let display_snapshot = this.display_snapshot(cx);
@@ -7378,7 +7424,6 @@ impl Editor {
if let Some(provider_actions) = provider_actions.log_err() {
actions.extend(provider_actions.into_iter().map(|action| {
AvailableCodeAction {
- excerpt_id: newest_selection.start.excerpt_id,
action,
provider: provider.clone(),
}
@@ -7426,8 +7471,7 @@ impl Editor {
.selections
.newest::<Point>(&snapshot.display_snapshot)
.head();
- let Some((buffer, point, _)) = snapshot.buffer_snapshot().point_to_buffer_point(cursor)
- else {
+ let Some((buffer, point)) = snapshot.buffer_snapshot().point_to_buffer_point(cursor) else {
return;
};
@@ -7612,27 +7656,13 @@ impl Editor {
return;
}
- let cursor_buffer_snapshot = cursor_buffer.read(cx);
let mut write_ranges = Vec::new();
let mut read_ranges = Vec::new();
+ let multibuffer_snapshot = buffer.snapshot(cx);
for highlight in highlights {
- let buffer_id = cursor_buffer.read(cx).remote_id();
- for (excerpt_id, _, excerpt_range) in
- buffer.excerpts_for_buffer(buffer_id, cx)
+ for range in
+ multibuffer_snapshot.buffer_range_to_excerpt_ranges(highlight.range)
{
- let start = highlight
- .range
- .start
- .max(&excerpt_range.context.start, cursor_buffer_snapshot);
- let end = highlight
- .range
- .end
- .min(&excerpt_range.context.end, cursor_buffer_snapshot);
- if start.cmp(&end, cursor_buffer_snapshot).is_ge() {
- continue;
- }
-
- let range = Anchor::range_in_buffer(excerpt_id, *start..*end);
if highlight.kind == lsp::DocumentHighlightKind::WRITE {
write_ranges.push(range);
} else {
@@ -7713,7 +7743,7 @@ impl Editor {
let match_task = cx.background_spawn(async move {
let buffer_ranges = multi_buffer_snapshot
.range_to_buffer_ranges(
- multi_buffer_range_to_query.start..=multi_buffer_range_to_query.end,
+ multi_buffer_range_to_query.start..multi_buffer_range_to_query.end,
)
.into_iter()
.filter(|(_, excerpt_visible_range, _)| !excerpt_visible_range.is_empty());
@@ -7731,11 +7761,11 @@ impl Editor {
return Vec::default();
};
let query_range = query_range.to_anchors(&multi_buffer_snapshot);
- for (buffer_snapshot, search_range, excerpt_id) in buffer_ranges {
+ for (buffer_snapshot, search_range, _) in buffer_ranges {
match_ranges.extend(
regex
.search(
- buffer_snapshot,
+ &buffer_snapshot,
Some(search_range.start.0..search_range.end.0),
)
.await
@@ -7745,9 +7775,14 @@ impl Editor {
.anchor_after(search_range.start + match_range.start);
let match_end = buffer_snapshot
.anchor_before(search_range.start + match_range.end);
- let match_anchor_range =
- Anchor::range_in_buffer(excerpt_id, match_start..match_end);
- (match_anchor_range != query_range).then_some(match_anchor_range)
+ {
+ let range = multi_buffer_snapshot
+ .anchor_in_buffer(match_start)?
+ ..multi_buffer_snapshot.anchor_in_buffer(match_end)?;
+ Some(range).filter(|match_anchor_range| {
+ match_anchor_range != &query_range
+ })
+ }
}),
);
}
@@ -8434,13 +8469,15 @@ impl Editor {
return;
};
- let Some((_, buffer, _)) = self
- .buffer
- .read(cx)
- .excerpt_containing(self.selections.newest_anchor().head(), cx)
+ let buffer_snapshot = self.buffer.read(cx).snapshot(cx);
+ let Some((position, _)) =
+ buffer_snapshot.anchor_to_buffer_anchor(self.selections.newest_anchor().head())
else {
return;
};
+ let Some(buffer) = self.buffer.read(cx).buffer(position.buffer_id) else {
+ return;
+ };
let extension = buffer
.read(cx)
@@ -8687,17 +8724,16 @@ impl Editor {
}
let selection = self.selections.newest_anchor();
- let cursor = selection.head();
let multibuffer = self.buffer.read(cx).snapshot(cx);
+ let cursor = selection.head();
+ let (cursor_text_anchor, _) = multibuffer.anchor_to_buffer_anchor(cursor)?;
+ let buffer = self.buffer.read(cx).buffer(cursor_text_anchor.buffer_id)?;
// Check project-level disable_ai setting for the current buffer
- if let Some((buffer, _)) = self.buffer.read(cx).text_anchor_for_position(cursor, cx) {
- if DisableAiSettings::is_ai_disabled_for_buffer(Some(&buffer), cx) {
- return None;
- }
+ if DisableAiSettings::is_ai_disabled_for_buffer(Some(&buffer), cx) {
+ return None;
}
let offset_selection = selection.map(|endpoint| endpoint.to_offset(&multibuffer));
- let excerpt_id = cursor.excerpt_id;
let show_in_menu = self.show_edit_predictions_in_menu();
let completions_menu_has_precedence = !show_in_menu
@@ -52,14 +52,13 @@ use settings::{
ProjectSettingsContent, ScrollBeyondLastLine, SearchSettingsContent, SettingsContent,
SettingsStore,
};
-use std::borrow::Cow;
+use std::{borrow::Cow, sync::Arc};
use std::{cell::RefCell, future::Future, rc::Rc, sync::atomic::AtomicBool, time::Instant};
use std::{
iter,
sync::atomic::{self, AtomicUsize},
};
use test::build_editor_with_project;
-use text::ToPoint as _;
use unindent::Unindent;
use util::{
assert_set_eq, path,
@@ -1030,12 +1029,13 @@ async fn test_navigation_history(cx: &mut TestAppContext) {
original_scroll_position
);
+ let other_buffer =
+ cx.new(|cx| MultiBuffer::singleton(cx.new(|cx| Buffer::local("test", cx)), cx));
+
// Ensure we don't panic when navigation data contains invalid anchors *and* points.
- let mut invalid_anchor = editor
- .scroll_manager
- .native_anchor(&editor.display_snapshot(cx), cx)
- .anchor;
- invalid_anchor.text_anchor.buffer_id = BufferId::new(999).ok();
+ let invalid_anchor = other_buffer.update(cx, |buffer, cx| {
+ buffer.snapshot(cx).anchor_after(MultiBufferOffset(3))
+ });
let invalid_point = Point::new(9999, 0);
editor.navigate(
Arc::new(NavigationData {
@@ -13836,7 +13836,7 @@ async fn test_multibuffer_format_during_save(cx: &mut TestAppContext) {
0,
cx,
);
- assert_eq!(multi_buffer.excerpt_ids().len(), 9);
+ assert_eq!(multi_buffer.read(cx).excerpts().count(), 9);
multi_buffer
});
let multi_buffer_editor = cx.new_window_entity(|window, cx| {
@@ -18946,157 +18946,6 @@ fn test_editing_disjoint_excerpts(cx: &mut TestAppContext) {
});
}
-#[gpui::test]
-fn test_refresh_selections(cx: &mut TestAppContext) {
- init_test(cx, |_| {});
-
- let buffer = cx.new(|cx| Buffer::local(sample_text(5, 4, 'a'), cx));
- let multibuffer = cx.new(|cx| {
- let mut multibuffer = MultiBuffer::new(ReadWrite);
- multibuffer.set_excerpts_for_path(
- PathKey::sorted(0),
- buffer.clone(),
- [
- Point::new(0, 0)..Point::new(1, 4),
- Point::new(3, 0)..Point::new(4, 4),
- ],
- 0,
- cx,
- );
- multibuffer
- });
-
- let editor = cx.add_window(|window, cx| {
- let mut editor = build_editor(multibuffer.clone(), window, cx);
- let snapshot = editor.snapshot(window, cx);
- editor.change_selections(SelectionEffects::no_scroll(), window, cx, |s| {
- s.select_ranges([Point::new(1, 3)..Point::new(1, 3)])
- });
- editor.begin_selection(
- Point::new(2, 1).to_display_point(&snapshot),
- true,
- 1,
- window,
- cx,
- );
- assert_eq!(
- editor.selections.ranges(&editor.display_snapshot(cx)),
- [
- Point::new(1, 3)..Point::new(1, 3),
- Point::new(2, 1)..Point::new(2, 1),
- ]
- );
- editor
- });
-
- // Refreshing selections is a no-op when excerpts haven't changed.
- _ = editor.update(cx, |editor, window, cx| {
- editor.change_selections(SelectionEffects::no_scroll(), window, cx, |s| s.refresh());
- assert_eq!(
- editor.selections.ranges(&editor.display_snapshot(cx)),
- [
- Point::new(1, 3)..Point::new(1, 3),
- Point::new(2, 1)..Point::new(2, 1),
- ]
- );
- });
-
- multibuffer.update(cx, |multibuffer, cx| {
- multibuffer.set_excerpts_for_path(
- PathKey::sorted(0),
- buffer.clone(),
- [Point::new(3, 0)..Point::new(4, 4)],
- 0,
- cx,
- );
- });
- _ = editor.update(cx, |editor, window, cx| {
- // Removing an excerpt causes the first selection to become degenerate.
- assert_eq!(
- editor.selections.ranges(&editor.display_snapshot(cx)),
- [
- Point::new(0, 0)..Point::new(0, 0),
- Point::new(0, 1)..Point::new(0, 1)
- ]
- );
-
- // Refreshing selections will relocate the first selection to the original buffer
- // location.
- editor.change_selections(SelectionEffects::no_scroll(), window, cx, |s| s.refresh());
- assert_eq!(
- editor.selections.ranges(&editor.display_snapshot(cx)),
- [
- Point::new(0, 0)..Point::new(0, 0),
- Point::new(0, 1)..Point::new(0, 1),
- ]
- );
- assert!(editor.selections.pending_anchor().is_some());
- });
-}
-
-#[gpui::test]
-fn test_refresh_selections_while_selecting_with_mouse(cx: &mut TestAppContext) {
- init_test(cx, |_| {});
-
- let buffer = cx.new(|cx| Buffer::local(sample_text(5, 4, 'a'), cx));
- let multibuffer = cx.new(|cx| {
- let mut multibuffer = MultiBuffer::new(ReadWrite);
- multibuffer.set_excerpts_for_path(
- PathKey::sorted(0),
- buffer.clone(),
- [
- Point::new(0, 0)..Point::new(1, 4),
- Point::new(3, 0)..Point::new(4, 4),
- ],
- 0,
- cx,
- );
- assert_eq!(multibuffer.read(cx).text(), "aaaa\nbbbb\ndddd\neeee");
- multibuffer
- });
-
- let editor = cx.add_window(|window, cx| {
- let mut editor = build_editor(multibuffer.clone(), window, cx);
- let snapshot = editor.snapshot(window, cx);
- editor.begin_selection(
- Point::new(1, 3).to_display_point(&snapshot),
- false,
- 1,
- window,
- cx,
- );
- assert_eq!(
- editor.selections.ranges(&editor.display_snapshot(cx)),
- [Point::new(1, 3)..Point::new(1, 3)]
- );
- editor
- });
-
- multibuffer.update(cx, |multibuffer, cx| {
- multibuffer.set_excerpts_for_path(
- PathKey::sorted(0),
- buffer.clone(),
- [Point::new(3, 0)..Point::new(4, 4)],
- 0,
- cx,
- );
- });
- _ = editor.update(cx, |editor, window, cx| {
- assert_eq!(
- editor.selections.ranges(&editor.display_snapshot(cx)),
- [Point::new(0, 0)..Point::new(0, 0)]
- );
-
- // Ensure we don't panic when selections are refreshed and that the pending selection is finalized.
- editor.change_selections(SelectionEffects::no_scroll(), window, cx, |s| s.refresh());
- assert_eq!(
- editor.selections.ranges(&editor.display_snapshot(cx)),
- [Point::new(0, 0)..Point::new(0, 0)]
- );
- assert!(editor.selections.pending_anchor().is_some());
- });
-}
-
#[gpui::test]
async fn test_extra_newline_insertion(cx: &mut TestAppContext) {
init_test(cx, |_| {});
@@ -19263,7 +19112,7 @@ async fn test_copy_highlight_json(cx: &mut TestAppContext) {
let x = 1;ˇ
}
"});
- setup_rust_syntax_highlighting(&mut cx);
+ setup_syntax_highlighting(rust_lang(), &mut cx);
cx.update_editor(|editor, window, cx| {
editor.copy_highlight_json(&CopyHighlightJson, window, cx);
@@ -19311,7 +19160,7 @@ async fn test_copy_highlight_json_selected_range(cx: &mut TestAppContext) {
let yˇ» = 2;
}
"});
- setup_rust_syntax_highlighting(&mut cx);
+ setup_syntax_highlighting(rust_lang(), &mut cx);
cx.update_editor(|editor, window, cx| {
editor.copy_highlight_json(&CopyHighlightJson, window, cx);
@@ -19354,7 +19203,7 @@ async fn test_copy_highlight_json_selected_line_range(cx: &mut TestAppContext) {
let yˇ» = 2;
}
"});
- setup_rust_syntax_highlighting(&mut cx);
+ setup_syntax_highlighting(rust_lang(), &mut cx);
cx.update_editor(|editor, window, cx| {
editor.selections.set_line_mode(true);
@@ -19404,7 +19253,7 @@ async fn test_copy_highlight_json_single_line(cx: &mut TestAppContext) {
let y = 2;
}
"});
- setup_rust_syntax_highlighting(&mut cx);
+ setup_syntax_highlighting(rust_lang(), &mut cx);
cx.update_editor(|editor, window, cx| {
editor.selections.set_line_mode(true);
@@ -19431,34 +19280,6 @@ async fn test_copy_highlight_json_single_line(cx: &mut TestAppContext) {
);
}
-fn setup_rust_syntax_highlighting(cx: &mut EditorTestContext) {
- let syntax = SyntaxTheme::new_test(vec![
- ("keyword", Hsla::red()),
- ("function", Hsla::blue()),
- ("variable", Hsla::green()),
- ("number", Hsla::default()),
- ("operator", Hsla::default()),
- ("punctuation.bracket", Hsla::default()),
- ("punctuation.delimiter", Hsla::default()),
- ]);
-
- let language = rust_lang();
- language.set_theme(&syntax);
-
- cx.update_buffer(|buffer, cx| buffer.set_language(Some(language), cx));
- cx.executor().run_until_parked();
- cx.update_editor(|editor, window, cx| {
- editor.set_style(
- EditorStyle {
- syntax: Arc::new(syntax),
- ..Default::default()
- },
- window,
- cx,
- );
- });
-}
-
#[gpui::test]
async fn test_following(cx: &mut TestAppContext) {
init_test(cx, |_| {});
@@ -19738,8 +19559,8 @@ async fn test_following_with_multiple_excerpts(cx: &mut TestAppContext) {
let (buffer_1, buffer_2) = project.update(cx, |project, cx| {
(
- project.create_local_buffer("abc\ndef\nghi\njkl\n", None, false, cx),
- project.create_local_buffer("mno\npqr\nstu\nvwx\n", None, false, cx),
+ project.create_local_buffer("abc\ndef\nghi\njkl\nmno\npqr\nstu\nvwx\nyza\nbcd\nefg\nhij\nklm\nnop\nqrs\ntuv\nwxy\nzab\ncde\nfgh\n", None, false, cx),
+ project.create_local_buffer("aaa\nbbb\nccc\nddd\neee\nfff\nggg\nhhh\niii\njjj\nkkk\nlll\nmmm\nnnn\nooo\nppp\nqqq\nrrr\nsss\nttt\n", None, false, cx),
)
});
@@ -19814,7 +19635,7 @@ async fn test_following_with_multiple_excerpts(cx: &mut TestAppContext) {
// Remove some excerpts.
leader.update(cx, |leader, cx| {
leader.buffer.update(cx, |multibuffer, cx| {
- multibuffer.remove_excerpts_for_path(
+ multibuffer.remove_excerpts(
PathKey::with_sort_prefix(1, rel_path("b.txt").into_arc()),
cx,
);
@@ -23318,7 +23139,7 @@ async fn test_toggle_diff_expand_in_multi_buffer(cx: &mut TestAppContext) {
0,
cx,
);
- assert_eq!(multibuffer.excerpt_ids().len(), 9);
+ assert_eq!(multibuffer.read(cx).excerpts().count(), 9);
multibuffer
});
@@ -23422,7 +23243,7 @@ async fn test_expand_diff_hunk_at_excerpt_boundary(cx: &mut TestAppContext) {
0,
cx,
);
- assert_eq!(multibuffer.excerpt_ids().len(), 3);
+ assert_eq!(multibuffer.read(cx).excerpts().count(), 3);
multibuffer
});
@@ -24191,9 +24012,13 @@ async fn setup_indent_guides_editor(
let buffer_id = cx.update_editor(|editor, window, cx| {
editor.set_text(text, window, cx);
- let buffer_ids = editor.buffer().read(cx).excerpt_buffer_ids();
-
- buffer_ids[0]
+ editor
+ .buffer()
+ .read(cx)
+ .as_singleton()
+ .unwrap()
+ .read(cx)
+ .remote_id()
});
(buffer_id, cx)
@@ -24902,7 +24727,7 @@ async fn test_indent_guide_with_expanded_diff_hunks(cx: &mut TestAppContext) {
editor
.snapshot(window, cx)
.buffer_snapshot()
- .indent_guides_in_range(Anchor::min()..Anchor::max(), false, cx)
+ .indent_guides_in_range(Anchor::Min..Anchor::Max, false, cx)
.map(|guide| (guide.start_row..=guide.end_row, guide.depth))
.collect::<Vec<_>>()
});
@@ -24957,12 +24782,19 @@ async fn test_adjacent_diff_hunks(executor: BackgroundExecutor, cx: &mut TestApp
let hunk_ranges = cx.update_editor(|editor, window, cx| {
let snapshot = editor.snapshot(window, cx);
let hunks = editor
- .diff_hunks_in_ranges(&[Anchor::min()..Anchor::max()], &snapshot.buffer_snapshot())
+ .diff_hunks_in_ranges(&[Anchor::Min..Anchor::Max], &snapshot.buffer_snapshot())
.collect::<Vec<_>>();
- let excerpt_id = editor.buffer.read(cx).excerpt_ids()[0];
+ let multibuffer_snapshot = editor.buffer.read(cx).snapshot(cx);
hunks
.into_iter()
- .map(|hunk| Anchor::range_in_buffer(excerpt_id, hunk.buffer_range))
+ .map(|hunk| {
+ multibuffer_snapshot
+ .anchor_in_excerpt(hunk.buffer_range.start)
+ .unwrap()
+ ..multibuffer_snapshot
+ .anchor_in_excerpt(hunk.buffer_range.end)
+ .unwrap()
+ })
.collect::<Vec<_>>()
});
assert_eq!(hunk_ranges.len(), 2);
@@ -25047,12 +24879,19 @@ async fn test_adjacent_diff_hunks(executor: BackgroundExecutor, cx: &mut TestApp
let hunk_ranges = cx.update_editor(|editor, window, cx| {
let snapshot = editor.snapshot(window, cx);
let hunks = editor
- .diff_hunks_in_ranges(&[Anchor::min()..Anchor::max()], &snapshot.buffer_snapshot())
+ .diff_hunks_in_ranges(&[Anchor::Min..Anchor::Max], &snapshot.buffer_snapshot())
.collect::<Vec<_>>();
- let excerpt_id = editor.buffer.read(cx).excerpt_ids()[0];
+ let multibuffer_snapshot = snapshot.buffer_snapshot();
hunks
.into_iter()
- .map(|hunk| Anchor::range_in_buffer(excerpt_id, hunk.buffer_range))
+ .map(|hunk| {
+ multibuffer_snapshot
+ .anchor_in_excerpt(hunk.buffer_range.start)
+ .unwrap()
+ ..multibuffer_snapshot
+ .anchor_in_excerpt(hunk.buffer_range.end)
+ .unwrap()
+ })
.collect::<Vec<_>>()
});
assert_eq!(hunk_ranges.len(), 2);
@@ -25112,12 +24951,19 @@ async fn test_toggle_deletion_hunk_at_start_of_file(
let hunk_ranges = cx.update_editor(|editor, window, cx| {
let snapshot = editor.snapshot(window, cx);
let hunks = editor
- .diff_hunks_in_ranges(&[Anchor::min()..Anchor::max()], &snapshot.buffer_snapshot())
+ .diff_hunks_in_ranges(&[Anchor::Min..Anchor::Max], &snapshot.buffer_snapshot())
.collect::<Vec<_>>();
- let excerpt_id = editor.buffer.read(cx).excerpt_ids()[0];
+ let multibuffer_snapshot = editor.buffer.read(cx).snapshot(cx);
hunks
.into_iter()
- .map(|hunk| Anchor::range_in_buffer(excerpt_id, hunk.buffer_range))
+ .map(|hunk| {
+ multibuffer_snapshot
+ .anchor_in_excerpt(hunk.buffer_range.start)
+ .unwrap()
+ ..multibuffer_snapshot
+ .anchor_in_excerpt(hunk.buffer_range.end)
+ .unwrap()
+ })
.collect::<Vec<_>>()
});
assert_eq!(hunk_ranges.len(), 1);
@@ -25217,12 +25063,17 @@ async fn test_expand_first_line_diff_hunk_keeps_deleted_lines_visible(
// Expanding a diff hunk at the first line inserts deleted lines above the first buffer line.
cx.update_editor(|editor, window, cx| {
let snapshot = editor.snapshot(window, cx);
- let excerpt_id = editor.buffer.read(cx).excerpt_ids()[0];
+ let multibuffer_snapshot = editor.buffer.read(cx).snapshot(cx);
let hunks = editor
- .diff_hunks_in_ranges(&[Anchor::min()..Anchor::max()], &snapshot.buffer_snapshot())
+ .diff_hunks_in_ranges(&[Anchor::Min..Anchor::Max], &snapshot.buffer_snapshot())
.collect::<Vec<_>>();
assert_eq!(hunks.len(), 1);
- let hunk_range = Anchor::range_in_buffer(excerpt_id, hunks[0].buffer_range.clone());
+ let hunk_range = multibuffer_snapshot
+ .anchor_in_excerpt(hunks[0].buffer_range.start)
+ .unwrap()
+ ..multibuffer_snapshot
+ .anchor_in_excerpt(hunks[0].buffer_range.end)
+ .unwrap();
editor.toggle_single_diff_hunk(hunk_range, cx)
});
executor.run_until_parked();
@@ -25279,7 +25130,7 @@ async fn test_display_diff_hunks(cx: &mut TestAppContext) {
multibuffer.set_excerpts_for_path(
PathKey::with_sort_prefix(0, buffer.read(cx).file().unwrap().path().clone()),
buffer.clone(),
- vec![text::Anchor::MIN.to_point(&snapshot)..text::Anchor::MAX.to_point(&snapshot)],
+ vec![Point::zero()..snapshot.max_point()],
2,
cx,
);
@@ -25365,7 +25216,7 @@ async fn test_partially_staged_hunk(cx: &mut TestAppContext) {
cx.update_editor(|editor, window, cx| {
let snapshot = editor.snapshot(window, cx);
let hunks = editor
- .diff_hunks_in_ranges(&[Anchor::min()..Anchor::max()], &snapshot.buffer_snapshot())
+ .diff_hunks_in_ranges(&[Anchor::Min..Anchor::Max], &snapshot.buffer_snapshot())
.collect::<Vec<_>>();
assert_eq!(hunks.len(), 1);
assert_eq!(
@@ -26450,7 +26301,7 @@ async fn test_folded_buffers_cleared_on_excerpts_removed(cx: &mut TestAppContext
// `multi_buffer::Event::ExcerptsRemoved` event is emitted, which should be
// picked up by the editor and update the display map accordingly.
multi_buffer.update(cx, |multi_buffer, cx| {
- multi_buffer.remove_excerpts_for_path(PathKey::sorted(0), cx)
+ multi_buffer.remove_excerpts(PathKey::sorted(0), cx)
});
assert!(!editor.update(cx, |editor, cx| editor.has_any_buffer_folded(cx)));
}
@@ -26702,7 +26553,12 @@ async fn test_multi_buffer_navigation_with_folded_buffers(cx: &mut TestAppContex
);
let mut editor = Editor::new(EditorMode::full(), multi_buffer.clone(), None, window, cx);
- let buffer_ids = multi_buffer.read(cx).excerpt_buffer_ids();
+ let buffer_ids = multi_buffer
+ .read(cx)
+ .snapshot(cx)
+ .excerpts()
+ .map(|excerpt| excerpt.context.start.buffer_id)
+ .collect::<Vec<_>>();
// fold all but the second buffer, so that we test navigating between two
// adjacent folded buffers, as well as folded buffers at the start and
// end the multibuffer
@@ -27038,7 +26894,12 @@ async fn assert_highlighted_edits(
let text_anchor_edits = edits
.clone()
.into_iter()
- .map(|(range, edit)| (range.start.text_anchor..range.end.text_anchor, edit.into()))
+ .map(|(range, edit)| {
+ (
+ range.start.expect_text_anchor()..range.end.expect_text_anchor(),
+ edit.into(),
+ )
+ })
.collect::<Vec<_>>();
let edit_preview = window
@@ -27055,10 +26916,11 @@ async fn assert_highlighted_edits(
cx.update(|_window, cx| {
let highlighted_edits = edit_prediction_edit_text(
- snapshot.as_singleton().unwrap().2,
+ snapshot.as_singleton().unwrap(),
&edits,
&edit_preview,
include_deletions,
+ &snapshot,
cx,
);
assertion_fn(highlighted_edits, cx)
@@ -31479,12 +31341,8 @@ async fn test_paste_url_from_other_app_creates_markdown_link_selectively_in_mult
Point::new(1, 21)..Point::new(1, 25),
])
});
- let first_buffer_id = multi_buffer
- .read(cx)
- .excerpt_buffer_ids()
- .into_iter()
- .next()
- .unwrap();
+ let snapshot = multi_buffer.read(cx).snapshot(cx);
+ let first_buffer_id = snapshot.all_buffer_ids().next().unwrap();
let first_buffer = multi_buffer.read(cx).buffer(first_buffer_id).unwrap();
first_buffer.update(cx, |buffer, cx| {
buffer.set_language(Some(markdown_language.clone()), cx);
@@ -32530,7 +32388,12 @@ async fn test_multibuffer_selections_with_folding(cx: &mut TestAppContext) {
});
let mut cx = EditorTestContext::for_editor_in(editor.clone(), cx).await;
- let buffer_ids = cx.multibuffer(|mb, _| mb.excerpt_buffer_ids());
+ let buffer_ids = cx.multibuffer(|mb, cx| {
+ mb.snapshot(cx)
+ .excerpts()
+ .map(|excerpt| excerpt.context.start.buffer_id)
+ .collect::<Vec<_>>()
+ });
cx.assert_excerpts_with_selections(indoc! {"
[EXCERPT]
@@ -33770,7 +33633,7 @@ async fn test_diff_review_button_shown_when_ai_enabled(cx: &mut TestAppContext)
}
/// Helper function to create a DiffHunkKey for testing.
-/// Uses Anchor::min() as a placeholder anchor since these tests don't need
+/// Uses Anchor::Min as a placeholder anchor since these tests don't need
/// real buffer positioning.
fn test_hunk_key(file_path: &str) -> DiffHunkKey {
DiffHunkKey {
@@ -33779,7 +33642,7 @@ fn test_hunk_key(file_path: &str) -> DiffHunkKey {
} else {
Arc::from(util::rel_path::RelPath::unix(file_path).unwrap())
},
- hunk_start_anchor: Anchor::min(),
+ hunk_start_anchor: Anchor::Min,
}
}
@@ -33802,7 +33665,7 @@ fn add_test_comment(
comment: &str,
cx: &mut Context<Editor>,
) -> usize {
- editor.add_review_comment(key, comment.to_string(), Anchor::min()..Anchor::max(), cx)
+ editor.add_review_comment(key, comment.to_string(), Anchor::Min..Anchor::Max, cx)
}
#[gpui::test]
@@ -35838,3 +35701,75 @@ async fn test_align_selections_multicolumn(cx: &mut TestAppContext) {
cx.update_editor(|e, window, cx| e.align_selections(&AlignSelections, window, cx));
cx.assert_editor_state(after);
}
+
+#[gpui::test]
+async fn test_custom_fallback_highlights(cx: &mut TestAppContext) {
+ init_test(cx, |_| {});
+
+ let mut cx = EditorTestContext::new(cx).await;
+ cx.set_state(indoc! {"fn main(self, variable: TType) {ˇ}"});
+
+ let variable_color = Hsla::green();
+ let function_color = Hsla::blue();
+
+ let test_cases = [
+ ("@variable", Some(variable_color)),
+ ("@type", None),
+ ("@type @variable", Some(variable_color)),
+ ("@variable @type", Some(variable_color)),
+ ("@variable @function", Some(function_color)),
+ ("@function @variable", Some(variable_color)),
+ ];
+
+ for (test_case, expected) in test_cases {
+ let custom_rust_lang = Arc::into_inner(rust_lang())
+ .unwrap()
+ .with_highlights_query(format! {r#"(type_identifier) {test_case}"#}.as_str())
+ .unwrap();
+ let theme = setup_syntax_highlighting(Arc::new(custom_rust_lang), &mut cx);
+ let expected = expected.map_or_else(Vec::new, |expected_color| {
+ vec![(24..29, HighlightStyle::color(expected_color))]
+ });
+
+ cx.update_editor(|editor, window, cx| {
+ let snapshot = editor.snapshot(window, cx);
+ assert_eq!(
+ expected,
+ snapshot.combined_highlights(MultiBufferOffset(0)..snapshot.buffer().len(), &theme),
+ "Test case with '{test_case}' highlights query did not pass",
+ );
+ });
+ }
+}
+
+fn setup_syntax_highlighting(
+ language: Arc<Language>,
+ cx: &mut EditorTestContext,
+) -> Arc<SyntaxTheme> {
+ let syntax = Arc::new(SyntaxTheme::new_test(vec![
+ ("keyword", Hsla::red()),
+ ("function", Hsla::blue()),
+ ("variable", Hsla::green()),
+ ("number", Hsla::default()),
+ ("operator", Hsla::default()),
+ ("punctuation.bracket", Hsla::default()),
+ ("punctuation.delimiter", Hsla::default()),
+ ]));
+
+ language.set_theme(&syntax);
+
+ cx.update_buffer(|buffer, cx| buffer.set_language(Some(language), cx));
+ cx.executor().run_until_parked();
+ cx.update_editor(|editor, window, cx| {
+ editor.set_style(
+ EditorStyle {
+ syntax: syntax.clone(),
+ ..EditorStyle::default()
+ },
+ window,
+ cx,
+ );
+ });
+
+ syntax
+}
@@ -54,7 +54,7 @@ use itertools::Itertools;
use language::{HighlightedText, IndentGuideSettings, language_settings::ShowWhitespaceSetting};
use markdown::Markdown;
use multi_buffer::{
- Anchor, ExcerptId, ExcerptInfo, ExpandExcerptDirection, ExpandInfo, MultiBufferPoint,
+ Anchor, ExcerptBoundaryInfo, ExpandExcerptDirection, ExpandInfo, MultiBufferPoint,
MultiBufferRow, RowInfo,
};
@@ -1390,13 +1390,13 @@ impl EditorElement {
.snapshot
.display_point_to_anchor(valid_point, Bias::Left);
- if let Some((buffer_snapshot, file)) = position_map
+ if let Some((buffer_anchor, buffer_snapshot)) = position_map
.snapshot
.buffer_snapshot()
- .buffer_for_excerpt(buffer_anchor.excerpt_id)
- .and_then(|buffer| buffer.file().map(|file| (buffer, file)))
+ .anchor_to_buffer_anchor(buffer_anchor)
+ && let Some(file) = buffer_snapshot.file()
{
- let as_point = text::ToPoint::to_point(&buffer_anchor.text_anchor, buffer_snapshot);
+ let as_point = text::ToPoint::to_point(&buffer_anchor, buffer_snapshot);
let is_visible = editor
.gutter_breakpoint_indicator
@@ -1752,7 +1752,7 @@ impl EditorElement {
// Remote cursors
if let Some(collaboration_hub) = &editor.collaboration_hub {
for remote_selection in snapshot.remote_selections_in_range(
- &(Anchor::min()..Anchor::max()),
+ &(Anchor::Min..Anchor::Max),
collaboration_hub.deref(),
cx,
) {
@@ -2589,12 +2589,6 @@ impl EditorElement {
const INLINE_SLOT_CHAR_LIMIT: u32 = 4;
const MAX_ALTERNATE_DISTANCE: u32 = 8;
- let excerpt_id = snapshot
- .display_snapshot
- .buffer_snapshot()
- .excerpt_containing(buffer_point..buffer_point)
- .map(|excerpt| excerpt.id());
-
let is_valid_row = |row_candidate: u32| -> bool {
// move to other row if folded row
if snapshot.is_line_folded(MultiBufferRow(row_candidate)) {
@@ -2610,13 +2604,18 @@ impl EditorElement {
row: row_candidate,
column: 0,
};
- let candidate_excerpt_id = snapshot
+ // move to other row if different excerpt
+ let range = if candidate_point < buffer_point {
+ candidate_point..buffer_point
+ } else {
+ buffer_point..candidate_point
+ };
+ if snapshot
.display_snapshot
.buffer_snapshot()
- .excerpt_containing(candidate_point..candidate_point)
- .map(|excerpt| excerpt.id());
- // move to other row if different excerpt
- if excerpt_id != candidate_excerpt_id {
+ .excerpt_containing(range)
+ .is_none()
+ {
return false;
}
}
@@ -2796,7 +2795,7 @@ impl EditorElement {
.newest::<language::Point>(&editor_snapshot.display_snapshot)
.head();
- let Some((buffer, buffer_point, _)) = editor_snapshot
+ let Some((buffer, buffer_point)) = editor_snapshot
.buffer_snapshot()
.point_to_buffer_point(cursor_point)
else {
@@ -3389,8 +3388,8 @@ impl EditorElement {
.enumerate()
.map(|(ix, row_info)| {
let ExpandInfo {
- excerpt_id,
direction,
+ start_anchor,
} = row_info.expand_info?;
let icon_name = match direction {
@@ -3419,7 +3418,7 @@ impl EditorElement {
.width(width)
.on_click(move |_, window, cx| {
editor.update(cx, |editor, cx| {
- editor.expand_excerpt(excerpt_id, direction, window, cx);
+ editor.expand_excerpt(start_anchor, direction, window, cx);
});
})
.tooltip(Tooltip::for_action_title(
@@ -3886,7 +3885,7 @@ impl EditorElement {
selected_buffer_ids: &Vec<BufferId>,
latest_selection_anchors: &HashMap<BufferId, Anchor>,
is_row_soft_wrapped: impl Copy + Fn(usize) -> bool,
- sticky_header_excerpt_id: Option<ExcerptId>,
+ sticky_header_excerpt_id: Option<BufferId>,
indent_guides: &Option<Vec<IndentGuideLayout>>,
block_resize_offset: &mut i32,
window: &mut Window,
@@ -3974,7 +3973,7 @@ impl EditorElement {
let mut result = v_flex().id(block_id).w_full().pr(editor_margins.right);
if self.should_show_buffer_headers() {
- let selected = selected_buffer_ids.contains(&first_excerpt.buffer_id);
+ let selected = selected_buffer_ids.contains(&first_excerpt.buffer_id());
let jump_data = header_jump_data(
snapshot,
block_row_start,
@@ -4029,8 +4028,8 @@ impl EditorElement {
latest_selection_anchors,
);
- if sticky_header_excerpt_id != Some(excerpt.id) {
- let selected = selected_buffer_ids.contains(&excerpt.buffer_id);
+ if sticky_header_excerpt_id != Some(excerpt.buffer_id()) {
+ let selected = selected_buffer_ids.contains(&excerpt.buffer_id());
result = result.child(div().pr(editor_margins.right).child(
self.render_buffer_header(
@@ -4190,7 +4189,7 @@ impl EditorElement {
fn render_buffer_header(
&self,
- for_excerpt: &ExcerptInfo,
+ for_excerpt: &ExcerptBoundaryInfo,
is_folded: bool,
is_selected: bool,
is_sticky: bool,
@@ -4227,7 +4226,7 @@ impl EditorElement {
selected_buffer_ids: &Vec<BufferId>,
latest_selection_anchors: &HashMap<BufferId, Anchor>,
is_row_soft_wrapped: impl Copy + Fn(usize) -> bool,
- sticky_header_excerpt_id: Option<ExcerptId>,
+ sticky_header_excerpt_id: Option<BufferId>,
indent_guides: &Option<Vec<IndentGuideLayout>>,
window: &mut Window,
cx: &mut App,
@@ -4520,7 +4519,7 @@ impl EditorElement {
let editor_bg_color = cx.theme().colors().editor_background;
- let selected = selected_buffer_ids.contains(&excerpt.buffer_id);
+ let selected = selected_buffer_ids.contains(&excerpt.buffer_id());
let available_width = hitbox.bounds.size.width - right_margin;
@@ -7894,23 +7893,26 @@ impl EditorElement {
return;
}
let buffer_snapshot = &display_snapshot.buffer_snapshot();
- for (buffer, buffer_range, excerpt_id) in
- buffer_snapshot.range_to_buffer_ranges(anchor_range.start..=anchor_range.end)
+ for (excerpt_buffer_snapshot, buffer_range, _) in
+ buffer_snapshot.range_to_buffer_ranges(anchor_range.start..anchor_range.end)
{
- let buffer_range =
- buffer.anchor_after(buffer_range.start)..buffer.anchor_before(buffer_range.end);
+ let buffer_range = excerpt_buffer_snapshot.anchor_after(buffer_range.start)
+ ..excerpt_buffer_snapshot.anchor_before(buffer_range.end);
selections.extend(debug_ranges.ranges.iter().flat_map(|debug_range| {
- let player_color = theme
- .players()
- .color_for_participant(debug_range.occurrence_index as u32 + 1);
- debug_range.ranges.iter().filter_map(move |range| {
- if range.start.buffer_id != Some(buffer.remote_id()) {
+ debug_range.ranges.iter().filter_map(|range| {
+ let player_color = theme
+ .players()
+ .color_for_participant(debug_range.occurrence_index as u32 + 1);
+ if range.start.buffer_id != excerpt_buffer_snapshot.remote_id() {
return None;
}
- let clipped_start = range.start.max(&buffer_range.start, buffer);
- let clipped_end = range.end.min(&buffer_range.end, buffer);
+ let clipped_start = range
+ .start
+ .max(&buffer_range.start, &excerpt_buffer_snapshot);
+ let clipped_end =
+ range.end.min(&buffer_range.end, &excerpt_buffer_snapshot);
let range = buffer_snapshot
- .anchor_range_in_excerpt(excerpt_id, *clipped_start..*clipped_end)?;
+ .buffer_anchor_range_to_anchor_range(*clipped_start..*clipped_end)?;
let start = range.start.to_display_point(display_snapshot);
let end = range.end.to_display_point(display_snapshot);
let selection_layout = SelectionLayout {
@@ -8150,49 +8152,23 @@ pub(crate) fn header_jump_data(
editor_snapshot: &EditorSnapshot,
block_row_start: DisplayRow,
height: u32,
- first_excerpt: &ExcerptInfo,
+ first_excerpt: &ExcerptBoundaryInfo,
latest_selection_anchors: &HashMap<BufferId, Anchor>,
) -> JumpData {
- let jump_target = if let Some(anchor) = latest_selection_anchors.get(&first_excerpt.buffer_id)
- && let Some(range) = editor_snapshot.context_range_for_excerpt(anchor.excerpt_id)
- && let Some(buffer) = editor_snapshot
- .buffer_snapshot()
- .buffer_for_excerpt(anchor.excerpt_id)
+ let multibuffer_snapshot = editor_snapshot.buffer_snapshot();
+ let buffer = first_excerpt.buffer(multibuffer_snapshot);
+ let (jump_anchor, jump_buffer) = if let Some(anchor) =
+ latest_selection_anchors.get(&first_excerpt.buffer_id())
+ && let Some((jump_anchor, selection_buffer)) =
+ multibuffer_snapshot.anchor_to_buffer_anchor(*anchor)
{
- JumpTargetInExcerptInput {
- id: anchor.excerpt_id,
- buffer,
- excerpt_start_anchor: range.start,
- jump_anchor: anchor.text_anchor,
- }
+ (jump_anchor, selection_buffer)
} else {
- JumpTargetInExcerptInput {
- id: first_excerpt.id,
- buffer: &first_excerpt.buffer,
- excerpt_start_anchor: first_excerpt.range.context.start,
- jump_anchor: first_excerpt.range.primary.start,
- }
+ (first_excerpt.range.primary.start, buffer)
};
- header_jump_data_inner(editor_snapshot, block_row_start, height, &jump_target)
-}
-
-struct JumpTargetInExcerptInput<'a> {
- id: ExcerptId,
- buffer: &'a language::BufferSnapshot,
- excerpt_start_anchor: text::Anchor,
- jump_anchor: text::Anchor,
-}
-
-fn header_jump_data_inner(
- snapshot: &EditorSnapshot,
- block_row_start: DisplayRow,
- height: u32,
- for_excerpt: &JumpTargetInExcerptInput,
-) -> JumpData {
- let buffer = &for_excerpt.buffer;
- let jump_position = language::ToPoint::to_point(&for_excerpt.jump_anchor, buffer);
- let excerpt_start = for_excerpt.excerpt_start_anchor;
- let rows_from_excerpt_start = if for_excerpt.jump_anchor == excerpt_start {
+ let excerpt_start = first_excerpt.range.context.start;
+ let jump_position = language::ToPoint::to_point(&jump_anchor, jump_buffer);
+ let rows_from_excerpt_start = if jump_anchor == excerpt_start {
0
} else {
let excerpt_start_point = language::ToPoint::to_point(&excerpt_start, buffer);
@@ -8201,15 +8177,14 @@ fn header_jump_data_inner(
let line_offset_from_top = (block_row_start.0 + height + rows_from_excerpt_start)
.saturating_sub(
- snapshot
+ editor_snapshot
.scroll_anchor
- .scroll_position(&snapshot.display_snapshot)
+ .scroll_position(&editor_snapshot.display_snapshot)
.y as u32,
);
JumpData::MultiBufferPoint {
- excerpt_id: for_excerpt.id,
- anchor: for_excerpt.jump_anchor,
+ anchor: jump_anchor,
position: jump_position,
line_offset_from_top,
}
@@ -8217,7 +8192,7 @@ fn header_jump_data_inner(
pub(crate) fn render_buffer_header(
editor: &Entity<Editor>,
- for_excerpt: &ExcerptInfo,
+ for_excerpt: &ExcerptBoundaryInfo,
is_folded: bool,
is_selected: bool,
is_sticky: bool,
@@ -8229,6 +8204,8 @@ pub(crate) fn render_buffer_header(
let multi_buffer = editor_read.buffer.read(cx);
let is_read_only = editor_read.read_only(cx);
let editor_handle: &dyn ItemHandle = editor;
+ let multibuffer_snapshot = multi_buffer.snapshot(cx);
+ let buffer = for_excerpt.buffer(&multibuffer_snapshot);
let breadcrumbs = if is_selected {
editor_read.breadcrumbs_inner(cx)
@@ -8236,31 +8213,30 @@ pub(crate) fn render_buffer_header(
None
};
+ let buffer_id = for_excerpt.buffer_id();
let file_status = multi_buffer
.all_diff_hunks_expanded()
- .then(|| editor_read.status_for_buffer_id(for_excerpt.buffer_id, cx))
+ .then(|| editor_read.status_for_buffer_id(buffer_id, cx))
.flatten();
- let indicator = multi_buffer
- .buffer(for_excerpt.buffer_id)
- .and_then(|buffer| {
- let buffer = buffer.read(cx);
- let indicator_color = match (buffer.has_conflict(), buffer.is_dirty()) {
- (true, _) => Some(Color::Warning),
- (_, true) => Some(Color::Accent),
- (false, false) => None,
- };
- indicator_color.map(|indicator_color| Indicator::dot().color(indicator_color))
- });
+ let indicator = multi_buffer.buffer(buffer_id).and_then(|buffer| {
+ let buffer = buffer.read(cx);
+ let indicator_color = match (buffer.has_conflict(), buffer.is_dirty()) {
+ (true, _) => Some(Color::Warning),
+ (_, true) => Some(Color::Accent),
+ (false, false) => None,
+ };
+ indicator_color.map(|indicator_color| Indicator::dot().color(indicator_color))
+ });
let include_root = editor_read
.project
.as_ref()
.map(|project| project.read(cx).visible_worktrees(cx).count() > 1)
.unwrap_or_default();
- let file = for_excerpt.buffer.file();
+ let file = buffer.file();
let can_open_excerpts = file.is_none_or(|file| file.can_open());
let path_style = file.map(|file| file.path_style(cx));
- let relative_path = for_excerpt.buffer.resolve_file_path(include_root, cx);
+ let relative_path = buffer.resolve_file_path(include_root, cx);
let (parent_path, filename) = if let Some(path) = &relative_path {
if let Some(path_style) = path_style {
let (dir, file_name) = path_style.split(path);
@@ -8275,7 +8251,7 @@ pub(crate) fn render_buffer_header(
let colors = cx.theme().colors();
let header = div()
- .id(("buffer-header", for_excerpt.buffer_id.to_proto()))
+ .id(("buffer-header", buffer_id.to_proto()))
.p(BUFFER_HEADER_PADDING)
.w_full()
.h(FILE_HEADER_HEIGHT as f32 * window.line_height())
@@ -8303,7 +8279,7 @@ pub(crate) fn render_buffer_header(
.hover(|style| style.bg(colors.element_hover))
.map(|header| {
let editor = editor.clone();
- let buffer_id = for_excerpt.buffer_id;
+ let buffer_id = for_excerpt.buffer_id();
let toggle_chevron_icon =
FileIcons::get_chevron_icon(!is_folded, cx).map(Icon::from_path);
let button_size = rems_from_px(28.);
@@ -8367,7 +8343,7 @@ pub(crate) fn render_buffer_header(
.addons
.values()
.filter_map(|addon| {
- addon.render_buffer_header_controls(for_excerpt, window, cx)
+ addon.render_buffer_header_controls(for_excerpt, buffer, window, cx)
})
.take(1),
)
@@ -8460,7 +8436,7 @@ pub(crate) fn render_buffer_header(
),
)
})
- .when(!for_excerpt.buffer.capability.editable(), |el| {
+ .when(!buffer.capability.editable(), |el| {
el.child(Icon::new(IconName::FileLock).color(Color::Muted))
})
.when_some(breadcrumbs, |then, breadcrumbs| {
@@ -8511,7 +8487,7 @@ pub(crate) fn render_buffer_header(
})
.on_mouse_down(MouseButton::Left, |_, _, cx| cx.stop_propagation())
.on_click(window.listener_for(editor, {
- let buffer_id = for_excerpt.buffer_id;
+ let buffer_id = for_excerpt.buffer_id();
move |editor, e: &ClickEvent, window, cx| {
if e.modifiers().alt {
editor.open_excerpts_common(
@@ -8533,7 +8509,7 @@ pub(crate) fn render_buffer_header(
),
);
- let file = for_excerpt.buffer.file().cloned();
+ let file = buffer.file().cloned();
let editor = editor.clone();
right_click_menu("buffer-header-context-menu")
@@ -9855,14 +9831,14 @@ impl Element for EditorElement {
};
let start_anchor = if start_row == Default::default() {
- Anchor::min()
+ Anchor::Min
} else {
snapshot.buffer_snapshot().anchor_before(
DisplayPoint::new(start_row, 0).to_offset(&snapshot, Bias::Left),
)
};
let end_anchor = if end_row > max_row {
- Anchor::max()
+ Anchor::Max
} else {
snapshot.buffer_snapshot().anchor_before(
DisplayPoint::new(end_row, 0).to_offset(&snapshot, Bias::Right),
@@ -9888,7 +9864,7 @@ impl Element for EditorElement {
editor.update(cx, |editor, cx| {
let snapshot = editor.snapshot(window, cx);
let start_anchor = if start_row == Default::default() {
- Anchor::min()
+ Anchor::Min
} else {
snapshot.buffer_snapshot().anchor_before(
DisplayPoint::new(start_row, 0)
@@ -9896,7 +9872,7 @@ impl Element for EditorElement {
)
};
let end_anchor = if end_row > max_row {
- Anchor::max()
+ Anchor::Max
} else {
snapshot.buffer_snapshot().anchor_before(
DisplayPoint::new(end_row, 0)
@@ -10052,9 +10028,11 @@ impl Element for EditorElement {
HashMap::default();
for selection in all_anchor_selections.iter() {
let head = selection.head();
- if let Some(buffer_id) = head.text_anchor.buffer_id {
+ if let Some((text_anchor, _)) =
+ snapshot.buffer_snapshot().anchor_to_buffer_anchor(head)
+ {
anchors_by_buffer
- .entry(buffer_id)
+ .entry(text_anchor.buffer_id)
.and_modify(|(latest_id, latest_anchor)| {
if selection.id > *latest_id {
*latest_id = selection.id;
@@ -10322,8 +10300,9 @@ impl Element for EditorElement {
} else {
None
};
- let sticky_header_excerpt_id =
- sticky_header_excerpt.as_ref().map(|top| top.excerpt.id);
+ let sticky_header_excerpt_id = sticky_header_excerpt
+ .as_ref()
+ .map(|top| top.excerpt.buffer_id());
let buffer = snapshot.buffer_snapshot();
let start_buffer_row = MultiBufferRow(start_anchor.to_point(&buffer).row);
@@ -12968,7 +12947,7 @@ mod tests {
editor.insert_blocks(
[BlockProperties {
style: BlockStyle::Fixed,
- placement: BlockPlacement::Above(Anchor::min()),
+ placement: BlockPlacement::Above(Anchor::Min),
height: Some(3),
render: Arc::new(|cx| div().h(3. * cx.window.line_height()).into_any()),
priority: 0,
@@ -21,9 +21,9 @@ impl Editor {
};
let buffers_to_query = self
- .visible_excerpts(true, cx)
- .into_values()
- .map(|(buffer, ..)| buffer)
+ .visible_buffers(cx)
+ .into_iter()
+ .filter(|buffer| self.is_lsp_relevant(buffer.read(cx).file(), cx))
.chain(for_buffer.and_then(|id| self.buffer.read(cx).buffer(id)))
.filter(|buffer| {
let id = buffer.read(cx).remote_id();
@@ -204,8 +204,8 @@ impl GitBlame {
git_blame.generate(cx);
}
}
- multi_buffer::Event::ExcerptsAdded { .. }
- | multi_buffer::Event::ExcerptsEdited { .. } => git_blame.regenerate_on_edit(cx),
+ multi_buffer::Event::BufferRangesUpdated { .. }
+ | multi_buffer::Event::BuffersEdited { .. } => git_blame.regenerate_on_edit(cx),
_ => {}
},
);
@@ -346,11 +346,10 @@ impl GitBlame {
let Some(multi_buffer) = self.multi_buffer.upgrade() else {
return;
};
- multi_buffer
- .read(cx)
- .excerpt_buffer_ids()
- .into_iter()
- .for_each(|id| self.sync(cx, id));
+ let snapshot = multi_buffer.read(cx).snapshot(cx);
+ for id in snapshot.all_buffer_ids() {
+ self.sync(cx, id)
+ }
}
fn sync(&mut self, cx: &mut App, buffer_id: BufferId) {
@@ -497,10 +496,10 @@ impl GitBlame {
}
let buffers_to_blame = self
.multi_buffer
- .update(cx, |multi_buffer, _| {
- multi_buffer
+ .update(cx, |multi_buffer, cx| {
+ let snapshot = multi_buffer.snapshot(cx);
+ snapshot
.all_buffer_ids()
- .into_iter()
.filter_map(|id| Some(multi_buffer.buffer(id)?.downgrade()))
.collect::<Vec<_>>()
})
@@ -237,7 +237,8 @@ impl Editor {
let Some(mb_anchor) = self
.buffer()
.read(cx)
- .buffer_anchor_to_anchor(&buffer, anchor, cx)
+ .snapshot(cx)
+ .anchor_in_excerpt(anchor)
else {
return Task::ready(Ok(Navigated::No));
};
@@ -324,16 +325,13 @@ pub fn show_link_definition(
return;
}
- let trigger_anchor = trigger_point.anchor();
- let anchor = snapshot.buffer_snapshot().anchor_before(*trigger_anchor);
- let Some(buffer) = editor.buffer().read(cx).buffer_for_anchor(anchor, cx) else {
+ let anchor = trigger_point.anchor().bias_left(snapshot.buffer_snapshot());
+ let Some((anchor, _)) = snapshot.buffer_snapshot().anchor_to_buffer_anchor(anchor) else {
+ return;
+ };
+ let Some(buffer) = editor.buffer.read(cx).buffer(anchor.buffer_id) else {
return;
};
- let Anchor {
- excerpt_id,
- text_anchor,
- ..
- } = anchor;
let same_kind = hovered_link_state.preferred_kind == preferred_kind
|| hovered_link_state
.links
@@ -363,39 +361,39 @@ pub fn show_link_definition(
async move {
let result = match &trigger_point {
TriggerPoint::Text(_) => {
- if let Some((url_range, url)) = find_url(&buffer, text_anchor, cx.clone()) {
+ if let Some((url_range, url)) = find_url(&buffer, anchor, cx.clone()) {
this.read_with(cx, |_, _| {
let range = maybe!({
let range =
- snapshot.anchor_range_in_excerpt(excerpt_id, url_range)?;
+ snapshot.buffer_anchor_range_to_anchor_range(url_range)?;
Some(RangeInEditor::Text(range))
});
(range, vec![HoverLink::Url(url)])
})
.ok()
} else if let Some((filename_range, filename)) =
- find_file(&buffer, project.clone(), text_anchor, cx).await
+ find_file(&buffer, project.clone(), anchor, cx).await
{
let range = maybe!({
let range =
- snapshot.anchor_range_in_excerpt(excerpt_id, filename_range)?;
+ snapshot.buffer_anchor_range_to_anchor_range(filename_range)?;
Some(RangeInEditor::Text(range))
});
Some((range, vec![HoverLink::File(filename)]))
} else if let Some(provider) = provider {
let task = cx.update(|_, cx| {
- provider.definitions(&buffer, text_anchor, preferred_kind, cx)
+ provider.definitions(&buffer, anchor, preferred_kind, cx)
})?;
if let Some(task) = task {
task.await.ok().flatten().map(|definition_result| {
(
definition_result.iter().find_map(|link| {
link.origin.as_ref().and_then(|origin| {
- let range = snapshot.anchor_range_in_excerpt(
- excerpt_id,
- origin.range.clone(),
- )?;
+ let range = snapshot
+ .buffer_anchor_range_to_anchor_range(
+ origin.range.clone(),
+ )?;
Some(RangeInEditor::Text(range))
})
}),
@@ -1168,7 +1166,7 @@ mod tests {
});
cx.simulate_mouse_move(hover_point, None, Modifiers::secondary_key());
cx.background_executor.run_until_parked();
- assert!(requests.try_next().is_err());
+ assert!(requests.try_recv().is_err());
cx.assert_editor_text_highlights(
HighlightKey::HoveredLinkState,
indoc! {"
@@ -1602,7 +1600,11 @@ mod tests {
cx.set_state(input);
let (position, snapshot) = cx.editor(|editor, _, cx| {
- let positions = editor.selections.newest_anchor().head().text_anchor;
+ let positions = editor
+ .selections
+ .newest_anchor()
+ .head()
+ .expect_text_anchor();
let snapshot = editor
.buffer()
.clone()
@@ -275,12 +275,12 @@ fn show_hover(
let snapshot = editor.snapshot(window, cx);
- let (buffer, buffer_position) = editor
+ let (buffer_position, _) = editor
.buffer
.read(cx)
- .text_anchor_for_position(anchor, cx)?;
-
- let (excerpt_id, _, _) = editor.buffer().read(cx).excerpt_containing(anchor, cx)?;
+ .snapshot(cx)
+ .anchor_to_buffer_anchor(anchor)?;
+ let buffer = editor.buffer.read(cx).buffer(buffer_position.buffer_id)?;
let language_registry = editor
.project()
@@ -515,7 +515,7 @@ fn show_hover(
.and_then(|range| {
let range = snapshot
.buffer_snapshot()
- .anchor_range_in_excerpt(excerpt_id, range)?;
+ .buffer_anchor_range_to_anchor_range(range)?;
Some(range)
})
.or_else(|| {
@@ -45,6 +45,7 @@ impl InlaySplice {
#[derive(Debug, Clone)]
pub struct Inlay {
pub id: InlayId,
+ // TODO this could be an ExcerptAnchor
pub position: Anchor,
pub content: InlayContent,
}
@@ -14,7 +14,7 @@ use language::{
language_settings::{InlayHintKind, InlayHintSettings},
};
use lsp::LanguageServerId;
-use multi_buffer::{Anchor, ExcerptId, MultiBufferSnapshot};
+use multi_buffer::{Anchor, MultiBufferSnapshot};
use project::{
HoverBlock, HoverBlockKind, InlayHintLabel, InlayHintLabelPartTooltip, InlayHintTooltip,
InvalidationStrategy, ResolveState,
@@ -110,14 +110,15 @@ impl LspInlayHintData {
&mut self,
buffer_ids: &HashSet<BufferId>,
current_hints: impl IntoIterator<Item = Inlay>,
+ snapshot: &MultiBufferSnapshot,
) {
for buffer_id in buffer_ids {
self.hint_refresh_tasks.remove(buffer_id);
self.hint_chunk_fetching.remove(buffer_id);
}
for hint in current_hints {
- if let Some(buffer_id) = hint.position.text_anchor.buffer_id {
- if buffer_ids.contains(&buffer_id) {
+ if let Some((text_anchor, _)) = snapshot.anchor_to_buffer_anchor(hint.position) {
+ if buffer_ids.contains(&text_anchor.buffer_id) {
self.added_hints.remove(&hint.id);
}
}
@@ -237,7 +238,7 @@ pub enum InlayHintRefreshReason {
server_id: LanguageServerId,
request_id: Option<usize>,
},
- ExcerptsRemoved(Vec<ExcerptId>),
+ BuffersRemoved(Vec<BufferId>),
}
impl Editor {
@@ -303,7 +304,7 @@ impl Editor {
let debounce = match &reason {
InlayHintRefreshReason::SettingsChange(_)
| InlayHintRefreshReason::Toggle(_)
- | InlayHintRefreshReason::ExcerptsRemoved(_)
+ | InlayHintRefreshReason::BuffersRemoved(_)
| InlayHintRefreshReason::ModifiersChanged(_) => None,
_may_need_lsp_call => self.inlay_hints.as_ref().and_then(|inlay_hints| {
if invalidate_cache.should_invalidate() {
@@ -314,7 +315,8 @@ impl Editor {
}),
};
- let mut visible_excerpts = self.visible_excerpts(true, cx);
+ let mut visible_excerpts = self.visible_buffer_ranges(cx);
+ visible_excerpts.retain(|(snapshot, _, _)| self.is_lsp_relevant(snapshot.file(), cx));
let mut invalidate_hints_for_buffers = HashSet::default();
let ignore_previous_fetches = match reason {
@@ -324,7 +326,7 @@ impl Editor {
| InlayHintRefreshReason::ServerRemoved => true,
InlayHintRefreshReason::NewLinesShown
| InlayHintRefreshReason::RefreshRequested { .. }
- | InlayHintRefreshReason::ExcerptsRemoved(_) => false,
+ | InlayHintRefreshReason::BuffersRemoved(_) => false,
InlayHintRefreshReason::BufferEdited(buffer_id) => {
let Some(affected_language) = self
.buffer()
@@ -351,8 +353,8 @@ impl Editor {
);
semantics_provider.invalidate_inlay_hints(&invalidate_hints_for_buffers, cx);
- visible_excerpts.retain(|_, (visible_buffer, _, _)| {
- visible_buffer.read(cx).language() == Some(&affected_language)
+ visible_excerpts.retain(|(buffer_snapshot, _, _)| {
+ buffer_snapshot.language() == Some(&affected_language)
});
false
}
@@ -371,6 +373,7 @@ impl Editor {
inlay_hints.clear_for_buffers(
&invalidate_hints_for_buffers,
Self::visible_inlay_hints(self.display_map.read(cx)),
+ &multi_buffer.read(cx).snapshot(cx),
);
}
}
@@ -379,14 +382,18 @@ impl Editor {
.extend(invalidate_hints_for_buffers);
let mut buffers_to_query = HashMap::default();
- for (_, (buffer, buffer_version, visible_range)) in visible_excerpts {
- let buffer_id = buffer.read(cx).remote_id();
+ for (buffer_snapshot, visible_range, _) in visible_excerpts {
+ let buffer_id = buffer_snapshot.remote_id();
if !self.registered_buffers.contains_key(&buffer_id) {
continue;
}
- let buffer_snapshot = buffer.read(cx).snapshot();
+ let Some(buffer) = multi_buffer.read(cx).buffer(buffer_id) else {
+ continue;
+ };
+
+ let buffer_version = buffer_snapshot.version().clone();
let buffer_anchor_range = buffer_snapshot.anchor_before(visible_range.start)
..buffer_snapshot.anchor_after(visible_range.end);
@@ -514,13 +521,14 @@ impl Editor {
}
}
}
- InlayHintRefreshReason::ExcerptsRemoved(excerpts_removed) => {
+ InlayHintRefreshReason::BuffersRemoved(buffers_removed) => {
let to_remove = self
.display_map
.read(cx)
.current_inlays()
.filter_map(|inlay| {
- if excerpts_removed.contains(&inlay.position.excerpt_id) {
+ let anchor = inlay.position.raw_text_anchor()?;
+ if buffers_removed.contains(&anchor.buffer_id) {
Some(inlay.id)
} else {
None
@@ -610,13 +618,11 @@ impl Editor {
})
.max_by_key(|hint| hint.id)
{
- if let Some(ResolvedHint::Resolved(cached_hint)) = hovered_hint
- .position
- .text_anchor
- .buffer_id
- .and_then(|buffer_id| {
+ if let Some(ResolvedHint::Resolved(cached_hint)) = buffer_snapshot
+ .anchor_to_buffer_anchor(hovered_hint.position)
+ .and_then(|(anchor, _)| {
lsp_store.update(cx, |lsp_store, cx| {
- lsp_store.resolved_hint(buffer_id, hovered_hint.id, cx)
+ lsp_store.resolved_hint(anchor.buffer_id, hovered_hint.id, cx)
})
})
{
@@ -787,15 +793,19 @@ impl Editor {
new_hints: Vec<(Range<BufferRow>, anyhow::Result<CacheInlayHints>)>,
cx: &mut Context<Self>,
) {
+ let multi_buffer_snapshot = self.buffer.read(cx).snapshot(cx);
let visible_inlay_hint_ids = Self::visible_inlay_hints(self.display_map.read(cx))
- .filter(|inlay| inlay.position.text_anchor.buffer_id == Some(buffer_id))
+ .filter(|inlay| {
+ multi_buffer_snapshot
+ .anchor_to_buffer_anchor(inlay.position)
+ .map(|(anchor, _)| anchor.buffer_id)
+ == Some(buffer_id)
+ })
.map(|inlay| inlay.id)
.collect::<Vec<_>>();
let Some(inlay_hints) = &mut self.inlay_hints else {
return;
};
-
- let multi_buffer_snapshot = self.buffer.read(cx).snapshot(cx);
let Some(buffer_snapshot) = self
.buffer
.read(cx)
@@ -910,12 +920,10 @@ impl Editor {
hints_to_remove.extend(
Self::visible_inlay_hints(self.display_map.read(cx))
.filter(|inlay| {
- inlay
- .position
- .text_anchor
- .buffer_id
- .is_none_or(|buffer_id| {
- invalidate_hints_for_buffers.contains(&buffer_id)
+ multi_buffer_snapshot
+ .anchor_to_buffer_anchor(inlay.position)
+ .is_none_or(|(anchor, _)| {
+ invalidate_hints_for_buffers.contains(&anchor.buffer_id)
})
})
.map(|inlay| inlay.id),
@@ -2285,17 +2293,15 @@ pub mod tests {
cx: &mut gpui::TestAppContext,
) -> Range<Point> {
let ranges = editor
- .update(cx, |editor, _window, cx| editor.visible_excerpts(true, cx))
+ .update(cx, |editor, _window, cx| editor.visible_buffer_ranges(cx))
.unwrap();
assert_eq!(
ranges.len(),
1,
"Single buffer should produce a single excerpt with visible range"
);
- let (_, (excerpt_buffer, _, excerpt_visible_range)) = ranges.into_iter().next().unwrap();
- excerpt_buffer.read_with(cx, |buffer, _| {
- excerpt_visible_range.to_point(&buffer.snapshot())
- })
+ let (buffer_snapshot, visible_range, _) = ranges.into_iter().next().unwrap();
+ visible_range.to_point(&buffer_snapshot)
}
#[gpui::test]
@@ -2968,7 +2974,7 @@ let c = 3;"#
.await
.unwrap();
let multibuffer = cx.new(|_| MultiBuffer::new(Capability::ReadWrite));
- let (buffer_1_excerpts, buffer_2_excerpts) = multibuffer.update(cx, |multibuffer, cx| {
+ multibuffer.update(cx, |multibuffer, cx| {
multibuffer.set_excerpts_for_path(
PathKey::sorted(0),
buffer_1.clone(),
@@ -2983,15 +2989,8 @@ let c = 3;"#
0,
cx,
);
- let excerpt_ids = multibuffer.excerpt_ids();
- let buffer_1_excerpts = vec![excerpt_ids[0]];
- let buffer_2_excerpts = vec![excerpt_ids[1]];
- (buffer_1_excerpts, buffer_2_excerpts)
});
- assert!(!buffer_1_excerpts.is_empty());
- assert!(!buffer_2_excerpts.is_empty());
-
cx.executor().run_until_parked();
let editor = cx.add_window(|window, cx| {
Editor::for_multibuffer(multibuffer, Some(project.clone()), window, cx)
@@ -3092,7 +3091,7 @@ let c = 3;"#
editor
.update(cx, |editor, _, cx| {
editor.buffer().update(cx, |multibuffer, cx| {
- multibuffer.remove_excerpts_for_path(PathKey::sorted(1), cx);
+ multibuffer.remove_excerpts(PathKey::sorted(1), cx);
})
})
.unwrap();
@@ -1,7 +1,7 @@
use crate::{
ActiveDebugLine, Anchor, Autoscroll, BufferSerialization, Capability, Editor, EditorEvent,
- EditorSettings, ExcerptId, ExcerptRange, FormatTarget, MultiBuffer, MultiBufferSnapshot,
- NavigationData, ReportEditorEvent, SelectionEffects, ToPoint as _,
+ EditorSettings, ExcerptRange, FormatTarget, MultiBuffer, MultiBufferSnapshot, NavigationData,
+ ReportEditorEvent, SelectionEffects, ToPoint as _,
display_map::HighlightKey,
editor_settings::SeedQuerySetting,
persistence::{EditorDb, SerializedEditor},
@@ -22,7 +22,7 @@ use language::{
SelectionGoal, proto::serialize_anchor as serialize_text_anchor,
};
use lsp::DiagnosticSeverity;
-use multi_buffer::MultiBufferOffset;
+use multi_buffer::{MultiBufferOffset, PathKey};
use project::{
File, Project, ProjectItem as _, ProjectPath, lsp_store::FormatTrigger,
project_settings::ProjectSettings, search::SearchQuery,
@@ -33,14 +33,13 @@ use std::{
any::{Any, TypeId},
borrow::Cow,
cmp::{self, Ordering},
- iter,
ops::Range,
path::{Path, PathBuf},
sync::Arc,
};
use text::{BufferId, BufferSnapshot, Selection};
use ui::{IconDecorationKind, prelude::*};
-use util::{ResultExt, TryFutureExt, paths::PathExt};
+use util::{ResultExt, TryFutureExt, paths::PathExt, rel_path::RelPath};
use workspace::item::{Dedup, ItemSettings, SerializableItem, TabContentParams};
use workspace::{
CollaboratorId, ItemId, ItemNavHistory, ToolbarItemLocation, ViewId, Workspace, WorkspaceId,
@@ -83,10 +82,11 @@ impl FollowableItem for Editor {
};
let buffer_ids = state
- .excerpts
+ .path_excerpts
.iter()
.map(|excerpt| excerpt.buffer_id)
.collect::<HashSet<_>>();
+
let buffers = project.update(cx, |project, cx| {
buffer_ids
.iter()
@@ -106,38 +106,32 @@ impl FollowableItem for Editor {
multibuffer = MultiBuffer::singleton(buffers.pop().unwrap(), cx)
} else {
multibuffer = MultiBuffer::new(project.read(cx).capability());
- let mut sorted_excerpts = state.excerpts.clone();
- sorted_excerpts.sort_by_key(|e| e.id);
- let sorted_excerpts = sorted_excerpts.into_iter().peekable();
-
- for excerpt in sorted_excerpts {
- let Ok(buffer_id) = BufferId::new(excerpt.buffer_id) else {
+ for path_with_ranges in state.path_excerpts {
+ let Some(path_key) =
+ path_with_ranges.path_key.and_then(deserialize_path_key)
+ else {
continue;
};
-
- let mut insert_position = ExcerptId::min();
- for e in &state.excerpts {
- if e.id == excerpt.id {
- break;
- }
- if e.id < excerpt.id {
- insert_position = ExcerptId::from_proto(e.id);
- }
- }
-
- let buffer =
- buffers.iter().find(|b| b.read(cx).remote_id() == buffer_id);
-
- let Some(excerpt) = deserialize_excerpt_range(excerpt) else {
+ let Some(buffer_id) = BufferId::new(path_with_ranges.buffer_id).ok()
+ else {
continue;
};
-
- let Some(buffer) = buffer else { continue };
-
- multibuffer.insert_excerpts_with_ids_after(
- insert_position,
+ let Some(buffer) =
+ buffers.iter().find(|b| b.read(cx).remote_id() == buffer_id)
+ else {
+ continue;
+ };
+ let buffer_snapshot = buffer.read(cx).snapshot();
+ let ranges = path_with_ranges
+ .ranges
+ .into_iter()
+ .filter_map(deserialize_excerpt_range)
+ .collect::<Vec<_>>();
+ multibuffer.update_path_excerpts(
+ path_key,
buffer.clone(),
- [excerpt],
+ &buffer_snapshot,
+ &ranges,
cx,
);
}
@@ -158,6 +152,7 @@ impl FollowableItem for Editor {
})
})?;
+ editor.update(cx, |editor, cx| editor.text(cx));
update_editor_from_message(
editor.downgrade(),
project,
@@ -215,38 +210,43 @@ impl FollowableItem for Editor {
let display_snapshot = self.display_map.update(cx, |map, cx| map.snapshot(cx));
let scroll_anchor = self.scroll_manager.native_anchor(&display_snapshot, cx);
let buffer = self.buffer.read(cx);
- let excerpts = buffer
- .read(cx)
- .excerpts()
- .map(|(id, buffer, range)| proto::Excerpt {
- id: id.to_proto(),
- buffer_id: buffer.remote_id().into(),
- context_start: Some(serialize_text_anchor(&range.context.start)),
- context_end: Some(serialize_text_anchor(&range.context.end)),
- primary_start: Some(serialize_text_anchor(&range.primary.start)),
- primary_end: Some(serialize_text_anchor(&range.primary.end)),
- })
- .collect();
let snapshot = buffer.snapshot(cx);
+ let mut path_excerpts: Vec<proto::PathExcerpts> = Vec::new();
+ for excerpt in snapshot.excerpts() {
+ if let Some(prev_entry) = path_excerpts.last_mut()
+ && prev_entry.buffer_id == excerpt.context.start.buffer_id.to_proto()
+ {
+ prev_entry.ranges.push(serialize_excerpt_range(excerpt));
+ } else if let Some(path_key) = snapshot.path_for_buffer(excerpt.context.start.buffer_id)
+ {
+ path_excerpts.push(proto::PathExcerpts {
+ path_key: Some(serialize_path_key(path_key)),
+ buffer_id: excerpt.context.start.buffer_id.to_proto(),
+ ranges: vec![serialize_excerpt_range(excerpt)],
+ });
+ }
+ }
Some(proto::view::Variant::Editor(proto::view::Editor {
singleton: buffer.is_singleton(),
title: buffer.explicit_title().map(ToOwned::to_owned),
- excerpts,
- scroll_top_anchor: Some(serialize_anchor(&scroll_anchor.anchor, &snapshot)),
+ excerpts: Vec::new(),
+ scroll_top_anchor: Some(serialize_anchor(&scroll_anchor.anchor)),
scroll_x: scroll_anchor.offset.x,
scroll_y: scroll_anchor.offset.y,
selections: self
.selections
.disjoint_anchors_arc()
.iter()
- .map(|s| serialize_selection(s, &snapshot))
+ .map(serialize_selection)
.collect(),
pending_selection: self
.selections
.pending_anchor()
.as_ref()
- .map(|s| serialize_selection(s, &snapshot)),
+ .copied()
+ .map(serialize_selection),
+ path_excerpts,
}))
}
@@ -277,56 +277,52 @@ impl FollowableItem for Editor {
match update {
proto::update_view::Variant::Editor(update) => match event {
- EditorEvent::ExcerptsAdded {
+ EditorEvent::BufferRangesUpdated {
buffer,
- predecessor,
- excerpts,
+ path_key,
+ ranges,
} => {
- let buffer_id = buffer.read(cx).remote_id();
- let mut excerpts = excerpts.iter();
- if let Some((id, range)) = excerpts.next() {
- update.inserted_excerpts.push(proto::ExcerptInsertion {
- previous_excerpt_id: Some(predecessor.to_proto()),
- excerpt: serialize_excerpt(buffer_id, id, range),
- });
- update.inserted_excerpts.extend(excerpts.map(|(id, range)| {
- proto::ExcerptInsertion {
- previous_excerpt_id: None,
- excerpt: serialize_excerpt(buffer_id, id, range),
- }
- }))
- }
+ let buffer_id = buffer.read(cx).remote_id().to_proto();
+ let path_key = serialize_path_key(path_key);
+ let ranges = ranges
+ .iter()
+ .cloned()
+ .map(serialize_excerpt_range)
+ .collect::<Vec<_>>();
+ update.updated_paths.push(proto::PathExcerpts {
+ path_key: Some(path_key),
+ buffer_id,
+ ranges,
+ });
true
}
- EditorEvent::ExcerptsRemoved { ids, .. } => {
+ EditorEvent::BuffersRemoved { removed_buffer_ids } => {
update
- .deleted_excerpts
- .extend(ids.iter().copied().map(ExcerptId::to_proto));
+ .deleted_buffers
+ .extend(removed_buffer_ids.iter().copied().map(BufferId::to_proto));
true
}
EditorEvent::ScrollPositionChanged { autoscroll, .. } if !autoscroll => {
let display_snapshot = self.display_map.update(cx, |map, cx| map.snapshot(cx));
- let snapshot = self.buffer.read(cx).snapshot(cx);
let scroll_anchor = self.scroll_manager.native_anchor(&display_snapshot, cx);
- update.scroll_top_anchor =
- Some(serialize_anchor(&scroll_anchor.anchor, &snapshot));
+ update.scroll_top_anchor = Some(serialize_anchor(&scroll_anchor.anchor));
update.scroll_x = scroll_anchor.offset.x;
update.scroll_y = scroll_anchor.offset.y;
true
}
EditorEvent::SelectionsChanged { .. } => {
- let snapshot = self.buffer.read(cx).snapshot(cx);
update.selections = self
.selections
.disjoint_anchors_arc()
.iter()
- .map(|s| serialize_selection(s, &snapshot))
+ .map(serialize_selection)
.collect();
update.pending_selection = self
.selections
.pending_anchor()
.as_ref()
- .map(|s| serialize_selection(s, &snapshot));
+ .copied()
+ .map(serialize_selection);
true
}
_ => false,
@@ -370,7 +366,7 @@ impl FollowableItem for Editor {
) {
let buffer = self.buffer.read(cx);
let buffer = buffer.read(cx);
- let Some(position) = buffer.as_singleton_anchor(location) else {
+ let Some(position) = buffer.anchor_in_excerpt(location) else {
return;
};
let selection = Selection {
@@ -394,9 +390,9 @@ async fn update_editor_from_message(
) -> Result<()> {
// Open all of the buffers of which excerpts were added to the editor.
let inserted_excerpt_buffer_ids = message
- .inserted_excerpts
+ .updated_paths
.iter()
- .filter_map(|insertion| Some(insertion.excerpt.as_ref()?.buffer_id))
+ .map(|insertion| insertion.buffer_id)
.collect::<HashSet<_>>();
let inserted_excerpt_buffers = project.update(cx, |project, cx| {
inserted_excerpt_buffer_ids
@@ -407,66 +403,53 @@ async fn update_editor_from_message(
let _inserted_excerpt_buffers = try_join_all(inserted_excerpt_buffers).await?;
// Update the editor's excerpts.
- this.update(cx, |editor, cx| {
+ let buffer_snapshot = this.update(cx, |editor, cx| {
editor.buffer.update(cx, |multibuffer, cx| {
- let mut removed_excerpt_ids = message
- .deleted_excerpts
- .into_iter()
- .map(ExcerptId::from_proto)
- .collect::<Vec<_>>();
- removed_excerpt_ids.sort_by({
- let multibuffer = multibuffer.read(cx);
- move |a, b| a.cmp(b, &multibuffer)
- });
-
- let mut insertions = message.inserted_excerpts.into_iter().peekable();
- while let Some(insertion) = insertions.next() {
- let Some(excerpt) = insertion.excerpt else {
+ for path_with_excerpts in message.updated_paths {
+ let Some(path_key) = path_with_excerpts.path_key.and_then(deserialize_path_key)
+ else {
continue;
};
- let Some(previous_excerpt_id) = insertion.previous_excerpt_id else {
- continue;
- };
- let buffer_id = BufferId::new(excerpt.buffer_id)?;
- let Some(buffer) = project.read(cx).buffer_for_id(buffer_id, cx) else {
+ let ranges = path_with_excerpts
+ .ranges
+ .into_iter()
+ .filter_map(deserialize_excerpt_range)
+ .collect::<Vec<_>>();
+ let Some(buffer) = BufferId::new(path_with_excerpts.buffer_id)
+ .ok()
+ .and_then(|buffer_id| project.read(cx).buffer_for_id(buffer_id, cx))
+ else {
continue;
};
- let adjacent_excerpts = iter::from_fn(|| {
- let insertion = insertions.peek()?;
- if insertion.previous_excerpt_id.is_none()
- && insertion.excerpt.as_ref()?.buffer_id == u64::from(buffer_id)
- {
- insertions.next()?.excerpt
- } else {
- None
- }
- });
+ let buffer_snapshot = buffer.read(cx).snapshot();
+ multibuffer.update_path_excerpts(path_key, buffer, &buffer_snapshot, &ranges, cx);
+ }
- multibuffer.insert_excerpts_with_ids_after(
- ExcerptId::from_proto(previous_excerpt_id),
- buffer,
- [excerpt]
- .into_iter()
- .chain(adjacent_excerpts)
- .filter_map(deserialize_excerpt_range),
- cx,
- );
+ for buffer_id in message
+ .deleted_buffers
+ .into_iter()
+ .filter_map(|buffer_id| BufferId::new(buffer_id).ok())
+ {
+ multibuffer.remove_excerpts_for_buffer(buffer_id, cx);
}
- multibuffer.remove_excerpts(removed_excerpt_ids, cx);
- anyhow::Ok(())
+ multibuffer.snapshot(cx)
})
- })??;
+ })?;
// Deserialize the editor state.
let selections = message
.selections
.into_iter()
- .filter_map(deserialize_selection)
+ .filter_map(|selection| deserialize_selection(selection, &buffer_snapshot))
.collect::<Vec<_>>();
- let pending_selection = message.pending_selection.and_then(deserialize_selection);
- let scroll_top_anchor = message.scroll_top_anchor.and_then(deserialize_anchor);
+ let pending_selection = message
+ .pending_selection
+ .and_then(|selection| deserialize_selection(selection, &buffer_snapshot));
+ let scroll_top_anchor = message
+ .scroll_top_anchor
+ .and_then(|selection| deserialize_anchor(selection, &buffer_snapshot));
// Wait until the buffer has received all of the operations referenced by
// the editor's new state.
@@ -503,79 +486,103 @@ async fn update_editor_from_message(
Ok(())
}
-fn serialize_excerpt(
- buffer_id: BufferId,
- id: &ExcerptId,
- range: &ExcerptRange<language::Anchor>,
-) -> Option<proto::Excerpt> {
- Some(proto::Excerpt {
- id: id.to_proto(),
- buffer_id: buffer_id.into(),
- context_start: Some(serialize_text_anchor(&range.context.start)),
- context_end: Some(serialize_text_anchor(&range.context.end)),
- primary_start: Some(serialize_text_anchor(&range.primary.start)),
- primary_end: Some(serialize_text_anchor(&range.primary.end)),
- })
-}
-
-fn serialize_selection(
- selection: &Selection<Anchor>,
- buffer: &MultiBufferSnapshot,
-) -> proto::Selection {
+fn serialize_selection(selection: &Selection<Anchor>) -> proto::Selection {
proto::Selection {
id: selection.id as u64,
- start: Some(serialize_anchor(&selection.start, buffer)),
- end: Some(serialize_anchor(&selection.end, buffer)),
+ start: Some(serialize_anchor(&selection.start)),
+ end: Some(serialize_anchor(&selection.end)),
reversed: selection.reversed,
}
}
-fn serialize_anchor(anchor: &Anchor, buffer: &MultiBufferSnapshot) -> proto::EditorAnchor {
- proto::EditorAnchor {
- excerpt_id: buffer.latest_excerpt_id(anchor.excerpt_id).to_proto(),
- anchor: Some(serialize_text_anchor(&anchor.text_anchor)),
+fn serialize_anchor(anchor: &Anchor) -> proto::EditorAnchor {
+ match anchor {
+ Anchor::Min => proto::EditorAnchor {
+ excerpt_id: None,
+ anchor: Some(proto::Anchor {
+ replica_id: 0,
+ timestamp: 0,
+ offset: 0,
+ bias: proto::Bias::Left as i32,
+ buffer_id: None,
+ }),
+ },
+ Anchor::Excerpt(_) => proto::EditorAnchor {
+ excerpt_id: None,
+ anchor: anchor.raw_text_anchor().map(|a| serialize_text_anchor(&a)),
+ },
+ Anchor::Max => proto::EditorAnchor {
+ excerpt_id: None,
+ anchor: Some(proto::Anchor {
+ replica_id: u32::MAX,
+ timestamp: u32::MAX,
+ offset: u64::MAX,
+ bias: proto::Bias::Right as i32,
+ buffer_id: None,
+ }),
+ },
+ }
+}
+
+fn serialize_excerpt_range(range: ExcerptRange<language::Anchor>) -> proto::ExcerptRange {
+ let context_start = language::proto::serialize_anchor(&range.context.start);
+ let context_end = language::proto::serialize_anchor(&range.context.end);
+ let primary_start = language::proto::serialize_anchor(&range.primary.start);
+ let primary_end = language::proto::serialize_anchor(&range.primary.end);
+ proto::ExcerptRange {
+ context_start: Some(context_start),
+ context_end: Some(context_end),
+ primary_start: Some(primary_start),
+ primary_end: Some(primary_end),
}
}
fn deserialize_excerpt_range(
- excerpt: proto::Excerpt,
-) -> Option<(ExcerptId, ExcerptRange<language::Anchor>)> {
+ excerpt_range: proto::ExcerptRange,
+) -> Option<ExcerptRange<language::Anchor>> {
let context = {
- let start = language::proto::deserialize_anchor(excerpt.context_start?)?;
- let end = language::proto::deserialize_anchor(excerpt.context_end?)?;
+ let start = language::proto::deserialize_anchor(excerpt_range.context_start?)?;
+ let end = language::proto::deserialize_anchor(excerpt_range.context_end?)?;
start..end
};
- let primary = excerpt
+ let primary = excerpt_range
.primary_start
- .zip(excerpt.primary_end)
+ .zip(excerpt_range.primary_end)
.and_then(|(start, end)| {
let start = language::proto::deserialize_anchor(start)?;
let end = language::proto::deserialize_anchor(end)?;
Some(start..end)
})
.unwrap_or_else(|| context.clone());
- Some((
- ExcerptId::from_proto(excerpt.id),
- ExcerptRange { context, primary },
- ))
+ Some(ExcerptRange { context, primary })
}
-fn deserialize_selection(selection: proto::Selection) -> Option<Selection<Anchor>> {
+fn deserialize_selection(
+ selection: proto::Selection,
+ buffer: &MultiBufferSnapshot,
+) -> Option<Selection<Anchor>> {
Some(Selection {
id: selection.id as usize,
- start: deserialize_anchor(selection.start?)?,
- end: deserialize_anchor(selection.end?)?,
+ start: deserialize_anchor(selection.start?, buffer)?,
+ end: deserialize_anchor(selection.end?, buffer)?,
reversed: selection.reversed,
goal: SelectionGoal::None,
})
}
-fn deserialize_anchor(anchor: proto::EditorAnchor) -> Option<Anchor> {
- let excerpt_id = ExcerptId::from_proto(anchor.excerpt_id);
- Some(Anchor::in_buffer(
- excerpt_id,
- language::proto::deserialize_anchor(anchor.anchor?)?,
- ))
+fn deserialize_anchor(anchor: proto::EditorAnchor, buffer: &MultiBufferSnapshot) -> Option<Anchor> {
+ let anchor = anchor.anchor?;
+ if let Some(buffer_id) = anchor.buffer_id
+ && BufferId::new(buffer_id).is_ok()
+ {
+ let text_anchor = language::proto::deserialize_anchor(anchor)?;
+ buffer.anchor_in_buffer(text_anchor)
+ } else {
+ match proto::Bias::from_i32(anchor.bias)? {
+ proto::Bias::Left => Some(Anchor::Min),
+ proto::Bias::Right => Some(Anchor::Max),
+ }
+ }
}
impl Item for Editor {
@@ -1071,7 +1078,7 @@ impl Item for Editor {
f(ItemEvent::UpdateBreadcrumbs);
}
- EditorEvent::ExcerptsAdded { .. } | EditorEvent::ExcerptsRemoved { .. } => {
+ EditorEvent::BufferRangesUpdated { .. } | EditorEvent::BuffersRemoved { .. } => {
f(ItemEvent::Edit);
}
@@ -1434,9 +1441,9 @@ impl ProjectItem for Editor {
cx: &mut Context<Self>,
) -> Self {
let mut editor = Self::for_buffer(buffer.clone(), Some(project), window, cx);
+ let multibuffer_snapshot = editor.buffer().read(cx).snapshot(cx);
- if let Some((excerpt_id, _, snapshot)) =
- editor.buffer().read(cx).snapshot(cx).as_singleton()
+ if let Some(buffer_snapshot) = editor.buffer().read(cx).snapshot(cx).as_singleton()
&& WorkspaceSettings::get(None, cx).restore_on_file_reopen
&& let Some(restoration_data) = Self::project_item_kind()
.and_then(|kind| pane.as_ref()?.project_item_restoration_data.get(&kind))
@@ -1448,7 +1455,7 @@ impl ProjectItem for Editor {
{
if !restoration_data.folds.is_empty() {
editor.fold_ranges(
- clip_ranges(&restoration_data.folds, snapshot),
+ clip_ranges(&restoration_data.folds, buffer_snapshot),
false,
window,
cx,
@@ -1456,12 +1463,11 @@ impl ProjectItem for Editor {
}
if !restoration_data.selections.is_empty() {
editor.change_selections(SelectionEffects::no_scroll(), window, cx, |s| {
- s.select_ranges(clip_ranges(&restoration_data.selections, snapshot));
+ s.select_ranges(clip_ranges(&restoration_data.selections, buffer_snapshot));
});
}
let (top_row, offset) = restoration_data.scroll_position;
- let anchor =
- Anchor::in_buffer(excerpt_id, snapshot.anchor_before(Point::new(top_row, 0)));
+ let anchor = multibuffer_snapshot.anchor_before(Point::new(top_row, 0));
editor.set_scroll_anchor(ScrollAnchor { anchor, offset }, window, cx);
}
@@ -1624,6 +1630,7 @@ impl SearchableItem for Editor {
regex: true,
replacement: false,
selection: false,
+ select_all: true,
find_in_results: true,
}
} else {
@@ -1633,6 +1640,7 @@ impl SearchableItem for Editor {
regex: true,
replacement: true,
selection: true,
+ select_all: true,
find_in_results: false,
}
}
@@ -1838,7 +1846,7 @@ impl SearchableItem for Editor {
};
for range in search_within_ranges {
- for (search_buffer, search_range, excerpt_id, deleted_hunk_anchor) in
+ for (search_buffer, search_range, deleted_hunk_anchor) in
buffer.range_to_buffer_ranges_with_deleted_hunks(range)
{
ranges.extend(
@@ -1849,20 +1857,22 @@ impl SearchableItem for Editor {
)
.await
.into_iter()
- .map(|match_range| {
+ .filter_map(|match_range| {
if let Some(deleted_hunk_anchor) = deleted_hunk_anchor {
let start = search_buffer
.anchor_after(search_range.start + match_range.start);
let end = search_buffer
.anchor_before(search_range.start + match_range.end);
- deleted_hunk_anchor.with_diff_base_anchor(start)
- ..deleted_hunk_anchor.with_diff_base_anchor(end)
+ Some(
+ deleted_hunk_anchor.with_diff_base_anchor(start)
+ ..deleted_hunk_anchor.with_diff_base_anchor(end),
+ )
} else {
let start = search_buffer
.anchor_after(search_range.start + match_range.start);
let end = search_buffer
.anchor_before(search_range.start + match_range.end);
- Anchor::range_in_buffer(excerpt_id, start..end)
+ buffer.buffer_anchor_range_to_anchor_range(start..end)
}
}),
);
@@ -2050,6 +2060,20 @@ fn restore_serialized_buffer_contents(
}
}
+fn serialize_path_key(path_key: &PathKey) -> proto::PathKey {
+ proto::PathKey {
+ sort_prefix: path_key.sort_prefix,
+ path: path_key.path.to_proto(),
+ }
+}
+
+fn deserialize_path_key(path_key: proto::PathKey) -> Option<PathKey> {
+ Some(PathKey {
+ sort_prefix: path_key.sort_prefix,
+ path: RelPath::from_proto(&path_key.path).ok()?,
+ })
+}
+
#[cfg(test)]
mod tests {
use crate::editor_tests::init_test;
@@ -352,11 +352,12 @@ pub(crate) fn construct_initial_buffer_versions_map<
}
for (edit_range, _) in edits {
- let edit_range_buffer = editor
- .buffer()
- .read(cx)
- .excerpt_containing(edit_range.end, cx)
- .map(|e| e.1);
+ let multibuffer = editor.buffer.read(cx);
+ let snapshot = multibuffer.snapshot(cx);
+ let anchor = snapshot.anchor_before(edit_range.end);
+ let edit_range_buffer = snapshot
+ .anchor_to_buffer_anchor(anchor)
+ .and_then(|(text_anchor, _)| multibuffer.buffer(text_anchor.buffer_id));
if let Some(buffer) = edit_range_buffer {
let (buffer_id, buffer_version) =
buffer.read_with(cx, |buffer, _| (buffer.remote_id(), buffer.version.clone()));
@@ -2,7 +2,6 @@ use collections::HashMap;
use gpui::{AppContext, Context, Entity, Window};
use itertools::Itertools;
use language::Buffer;
-use multi_buffer::MultiBufferOffset;
use std::{ops::Range, sync::Arc, time::Duration};
use text::{Anchor, AnchorRangeExt, Bias, BufferId, ToOffset, ToPoint};
use util::ResultExt;
@@ -62,27 +61,15 @@ pub(super) fn refresh_linked_ranges(
editor
.update(cx, |editor, cx| {
let display_snapshot = editor.display_snapshot(cx);
- let selections = editor
- .selections
- .all::<MultiBufferOffset>(&display_snapshot);
+ let selections = editor.selections.all_anchors(&display_snapshot);
let snapshot = display_snapshot.buffer_snapshot();
let buffer = editor.buffer.read(cx);
- for selection in selections {
- let cursor_position = selection.head();
- let start_position = snapshot.anchor_before(cursor_position);
- let end_position = snapshot.anchor_after(selection.tail());
- if start_position.text_anchor.buffer_id != end_position.text_anchor.buffer_id
- || end_position.text_anchor.buffer_id.is_none()
+ for selection in selections.iter() {
+ if let Some((_, range)) =
+ snapshot.anchor_range_to_buffer_anchor_range(selection.range())
+ && let Some(buffer) = buffer.buffer(range.start.buffer_id)
{
- // Throw away selections spanning multiple buffers.
- continue;
- }
- if let Some(buffer) = buffer.buffer_for_anchor(end_position, cx) {
- applicable_selections.push((
- buffer,
- start_position.text_anchor,
- end_position.text_anchor,
- ));
+ applicable_selections.push((buffer, range.start, range.end));
}
}
})
@@ -9,7 +9,6 @@ use language::Buffer;
use language::Language;
use lsp::LanguageServerId;
use lsp::LanguageServerName;
-use multi_buffer::Anchor;
use project::LanguageServerToQuery;
use project::LocationLink;
use project::Project;
@@ -27,7 +26,12 @@ pub(crate) fn find_specific_language_server_in_selection<F>(
cx: &mut App,
filter_language: F,
language_server_name: LanguageServerName,
-) -> Option<(Anchor, Arc<Language>, LanguageServerId, Entity<Buffer>)>
+) -> Option<(
+ text::Anchor,
+ Arc<Language>,
+ LanguageServerId,
+ Entity<Buffer>,
+)>
where
F: Fn(&Language) -> bool,
{
@@ -40,19 +44,15 @@ where
.iter()
.find_map(|selection| {
let multi_buffer = multi_buffer.read(cx);
- let (position, buffer) = multi_buffer
- .buffer_for_anchor(selection.head(), cx)
- .map(|buffer| (selection.head(), buffer))
- .or_else(|| {
- multi_buffer
- .buffer_for_anchor(selection.tail(), cx)
- .map(|buffer| (selection.tail(), buffer))
- })?;
+ let multi_buffer_snapshot = multi_buffer.snapshot(cx);
+ let (position, buffer) = multi_buffer_snapshot
+ .anchor_to_buffer_anchor(selection.head())
+ .and_then(|(anchor, _)| Some((anchor, multi_buffer.buffer(anchor.buffer_id)?)))?;
if !seen_buffer_ids.insert(buffer.read(cx).remote_id()) {
return None;
}
- let language = buffer.read(cx).language_at(position.text_anchor)?;
+ let language = buffer.read(cx).language_at(position)?;
if filter_language(&language) {
let server_id = buffer.update(cx, |buffer, cx| {
project
@@ -108,7 +108,7 @@ pub fn lsp_tasks(
let buffers = buffer_ids
.iter()
.filter(|&&buffer_id| match for_position {
- Some(for_position) => for_position.buffer_id == Some(buffer_id),
+ Some(for_position) => for_position.buffer_id == buffer_id,
None => true,
})
.filter_map(|&buffer_id| project.read(cx).buffer_for_id(buffer_id, cx))
@@ -194,7 +194,7 @@ mod tests {
use language::{FakeLspAdapter, Language};
use languages::rust_lang;
use lsp::{LanguageServerId, LanguageServerName};
- use multi_buffer::{Anchor, MultiBuffer};
+ use multi_buffer::MultiBuffer;
use project::{FakeFs, Project};
use util::path;
@@ -236,7 +236,7 @@ mod tests {
let filter = |language: &Language| language.name().as_ref() == "Rust";
let assert_result = |result: Option<(
- Anchor,
+ text::Anchor,
Arc<Language>,
LanguageServerId,
Entity<language::Buffer>,
@@ -205,16 +205,17 @@ pub fn deploy_context_menu(
.all::<PointUtf16>(&display_map)
.into_iter()
.any(|s| !s.is_empty());
- let has_git_repo = buffer
- .buffer_id_for_anchor(anchor)
- .is_some_and(|buffer_id| {
- project
- .read(cx)
- .git_store()
- .read(cx)
- .repository_and_path_for_buffer_id(buffer_id, cx)
- .is_some()
- });
+ let has_git_repo =
+ buffer
+ .anchor_to_buffer_anchor(anchor)
+ .is_some_and(|(buffer_anchor, _)| {
+ project
+ .read(cx)
+ .git_store()
+ .read(cx)
+ .repository_and_path_for_buffer_id(buffer_anchor.buffer_id, cx)
+ .is_some()
+ });
let evaluate_selection = window.is_action_available(&EvaluateSelectedText, cx);
let run_to_cursor = window.is_action_available(&RunToCursor, cx);
@@ -588,22 +588,30 @@ pub fn start_of_excerpt(
direction: Direction,
) -> DisplayPoint {
let point = map.display_point_to_point(display_point, Bias::Left);
- let Some(excerpt) = map.buffer_snapshot().excerpt_containing(point..point) else {
+ let Some((_, excerpt_range)) = map.buffer_snapshot().excerpt_containing(point..point) else {
return display_point;
};
match direction {
Direction::Prev => {
- let mut start = excerpt.start_anchor().to_display_point(map);
+ let Some(start_anchor) = map.anchor_in_excerpt(excerpt_range.context.start) else {
+ return display_point;
+ };
+ let mut start = start_anchor.to_display_point(map);
if start >= display_point && start.row() > DisplayRow(0) {
- let Some(excerpt) = map.buffer_snapshot().excerpt_before(excerpt.id()) else {
+ let Some(excerpt) = map.buffer_snapshot().excerpt_before(start_anchor) else {
return display_point;
};
- start = excerpt.start_anchor().to_display_point(map);
+ if let Some(start_anchor) = map.anchor_in_excerpt(excerpt.context.start) {
+ start = start_anchor.to_display_point(map);
+ }
}
start
}
Direction::Next => {
- let mut end = excerpt.end_anchor().to_display_point(map);
+ let Some(end_anchor) = map.anchor_in_excerpt(excerpt_range.context.end) else {
+ return display_point;
+ };
+ let mut end = end_anchor.to_display_point(map);
*end.row_mut() += 1;
map.clip_point(end, Bias::Right)
}
@@ -616,12 +624,15 @@ pub fn end_of_excerpt(
direction: Direction,
) -> DisplayPoint {
let point = map.display_point_to_point(display_point, Bias::Left);
- let Some(excerpt) = map.buffer_snapshot().excerpt_containing(point..point) else {
+ let Some((_, excerpt_range)) = map.buffer_snapshot().excerpt_containing(point..point) else {
return display_point;
};
match direction {
Direction::Prev => {
- let mut start = excerpt.start_anchor().to_display_point(map);
+ let Some(start_anchor) = map.anchor_in_excerpt(excerpt_range.context.start) else {
+ return display_point;
+ };
+ let mut start = start_anchor.to_display_point(map);
if start.row() > DisplayRow(0) {
*start.row_mut() -= 1;
}
@@ -630,18 +641,23 @@ pub fn end_of_excerpt(
start
}
Direction::Next => {
- let mut end = excerpt.end_anchor().to_display_point(map);
+ let Some(end_anchor) = map.anchor_in_excerpt(excerpt_range.context.end) else {
+ return display_point;
+ };
+ let mut end = end_anchor.to_display_point(map);
*end.column_mut() = 0;
if end <= display_point {
*end.row_mut() += 1;
let point_end = map.display_point_to_point(end, Bias::Right);
- let Some(excerpt) = map
+ let Some((_, excerpt_range)) = map
.buffer_snapshot()
.excerpt_containing(point_end..point_end)
else {
return display_point;
};
- end = excerpt.end_anchor().to_display_point(map);
+ if let Some(end_anchor) = map.anchor_in_excerpt(excerpt_range.context.end) {
+ end = end_anchor.to_display_point(map);
+ }
*end.column_mut() = 0;
}
end
@@ -8,9 +8,7 @@ use gpui::{
};
use language::{Buffer, BufferRow, Runnable};
use lsp::LanguageServerName;
-use multi_buffer::{
- Anchor, BufferOffset, MultiBufferOffset, MultiBufferRow, MultiBufferSnapshot, ToPoint as _,
-};
+use multi_buffer::{Anchor, BufferOffset, MultiBufferRow, MultiBufferSnapshot, ToPoint as _};
use project::{
Location, Project, TaskSourceKind,
debugger::breakpoint_store::{Breakpoint, BreakpointSessionState},
@@ -165,7 +163,7 @@ impl Editor {
.update(cx, |editor, cx| {
let multi_buffer = editor.buffer().read(cx);
if multi_buffer.is_singleton() {
- Some((multi_buffer.snapshot(cx), Anchor::min()..Anchor::max()))
+ Some((multi_buffer.snapshot(cx), Anchor::Min..Anchor::Max))
} else {
let display_snapshot =
editor.display_map.update(cx, |map, cx| map.snapshot(cx));
@@ -209,16 +207,8 @@ impl Editor {
.fold(HashMap::default(), |mut acc, (kind, location, task)| {
let buffer = location.target.buffer;
let buffer_snapshot = buffer.read(cx).snapshot();
- let offset = multi_buffer_snapshot.excerpts().find_map(
- |(excerpt_id, snapshot, _)| {
- if snapshot.remote_id() == buffer_snapshot.remote_id() {
- multi_buffer_snapshot
- .anchor_in_excerpt(excerpt_id, location.target.range.start)
- } else {
- None
- }
- },
- );
+ let offset =
+ multi_buffer_snapshot.anchor_in_excerpt(location.target.range.start);
if let Some(offset) = offset {
let task_buffer_range =
location.target.range.to_point(&buffer_snapshot);
@@ -369,20 +359,23 @@ impl Editor {
(selection, buffer, snapshot)
};
let selection_range = selection.range();
- let start = editor_snapshot
+ let Some((_, range)) = editor_snapshot
.display_snapshot
.buffer_snapshot()
- .anchor_after(selection_range.start)
- .text_anchor;
- let end = editor_snapshot
- .display_snapshot
- .buffer_snapshot()
- .anchor_after(selection_range.end)
- .text_anchor;
- let location = Location {
- buffer,
- range: start..end,
+ .anchor_range_to_buffer_anchor_range(
+ editor_snapshot
+ .display_snapshot
+ .buffer_snapshot()
+ .anchor_after(selection_range.start)
+ ..editor_snapshot
+ .display_snapshot
+ .buffer_snapshot()
+ .anchor_before(selection_range.end),
+ )
+ else {
+ return Task::ready(None);
};
+ let location = Location { buffer, range };
let captured_variables = {
let mut variables = TaskVariables::default();
let buffer = location.buffer.read(cx);
@@ -430,9 +423,9 @@ impl Editor {
return HashMap::default();
}
let buffers = if visible_only {
- self.visible_excerpts(true, cx)
- .into_values()
- .map(|(buffer, _, _)| buffer)
+ self.visible_buffers(cx)
+ .into_iter()
+ .filter(|buffer| self.is_lsp_relevant(buffer.read(cx).file(), cx))
.collect()
} else {
self.buffer().read(cx).all_buffers()
@@ -482,19 +475,15 @@ impl Editor {
cx: &mut Context<Self>,
) -> Option<(Entity<Buffer>, u32, Arc<RunnableTasks>)> {
let snapshot = self.buffer.read(cx).snapshot(cx);
- let offset = self
- .selections
- .newest::<MultiBufferOffset>(&self.display_snapshot(cx))
- .head();
- let mut excerpt = snapshot.excerpt_containing(offset..offset)?;
- let offset = excerpt.map_offset_to_buffer(offset);
- let buffer_id = excerpt.buffer().remote_id();
+ let anchor = self.selections.newest_anchor().head();
+ let (anchor, buffer_snapshot) = snapshot.anchor_to_buffer_anchor(anchor)?;
+ let offset = anchor.to_offset(buffer_snapshot);
- let layer = excerpt.buffer().syntax_layer_at(offset)?;
+ let layer = buffer_snapshot.syntax_layer_at(offset)?;
let mut cursor = layer.node().walk();
- while cursor.goto_first_child_for_byte(offset.0).is_some() {
- if cursor.node().end_byte() == offset.0 {
+ while cursor.goto_first_child_for_byte(offset).is_some() {
+ if cursor.node().end_byte() == offset {
cursor.goto_next_sibling();
}
}
@@ -503,18 +492,18 @@ impl Editor {
loop {
let node = cursor.node();
let node_range = node.byte_range();
- let symbol_start_row = excerpt.buffer().offset_to_point(node.start_byte()).row;
+ let symbol_start_row = buffer_snapshot.offset_to_point(node.start_byte()).row;
// Check if this node contains our offset
- if node_range.start <= offset.0 && node_range.end >= offset.0 {
+ if node_range.start <= offset && node_range.end >= offset {
// If it contains offset, check for task
if let Some(tasks) = self
.runnables
.runnables
- .get(&buffer_id)
+ .get(&buffer_snapshot.remote_id())
.and_then(|(_, tasks)| tasks.get(&symbol_start_row))
{
- let buffer = self.buffer.read(cx).buffer(buffer_id)?;
+ let buffer = self.buffer.read(cx).buffer(buffer_snapshot.remote_id())?;
return Some((buffer, symbol_start_row, Arc::new(tasks.to_owned())));
}
}
@@ -88,7 +88,7 @@ pub fn go_to_parent_module(
let request = proto::LspExtGoToParentModule {
project_id,
buffer_id: buffer_id.to_proto(),
- position: Some(serialize_anchor(&trigger_anchor.text_anchor)),
+ position: Some(serialize_anchor(&trigger_anchor)),
};
let response = client
.request(request)
@@ -106,7 +106,7 @@ pub fn go_to_parent_module(
.context("go to parent module via collab")?
} else {
let buffer_snapshot = buffer.read_with(cx, |buffer, _| buffer.snapshot());
- let position = trigger_anchor.text_anchor.to_point_utf16(&buffer_snapshot);
+ let position = trigger_anchor.to_point_utf16(&buffer_snapshot);
project
.update(cx, |project, cx| {
project.request_lsp(
@@ -168,7 +168,7 @@ pub fn expand_macro_recursively(
let request = proto::LspExtExpandMacro {
project_id,
buffer_id: buffer_id.to_proto(),
- position: Some(serialize_anchor(&trigger_anchor.text_anchor)),
+ position: Some(serialize_anchor(&trigger_anchor)),
};
let response = client
.request(request)
@@ -180,7 +180,7 @@ pub fn expand_macro_recursively(
}
} else {
let buffer_snapshot = buffer.read_with(cx, |buffer, _| buffer.snapshot());
- let position = trigger_anchor.text_anchor.to_point_utf16(&buffer_snapshot);
+ let position = trigger_anchor.to_point_utf16(&buffer_snapshot);
project
.update(cx, |project, cx| {
project.request_lsp(
@@ -195,10 +195,7 @@ pub fn expand_macro_recursively(
};
if macro_expansion.is_empty() {
- log::info!(
- "Empty macro expansion for position {:?}",
- trigger_anchor.text_anchor
- );
+ log::info!("Empty macro expansion for position {:?}", trigger_anchor);
return Ok(());
}
@@ -260,7 +257,7 @@ pub fn open_docs(editor: &mut Editor, _: &OpenDocs, window: &mut Window, cx: &mu
let request = proto::LspExtOpenDocs {
project_id,
buffer_id: buffer_id.to_proto(),
- position: Some(serialize_anchor(&trigger_anchor.text_anchor)),
+ position: Some(serialize_anchor(&trigger_anchor)),
};
let response = client
.request(request)
@@ -272,7 +269,7 @@ pub fn open_docs(editor: &mut Editor, _: &OpenDocs, window: &mut Window, cx: &mu
}
} else {
let buffer_snapshot = buffer.read_with(cx, |buffer, _| buffer.snapshot());
- let position = trigger_anchor.text_anchor.to_point_utf16(&buffer_snapshot);
+ let position = trigger_anchor.to_point_utf16(&buffer_snapshot);
project
.update(cx, |project, cx| {
project.request_lsp(
@@ -287,10 +284,7 @@ pub fn open_docs(editor: &mut Editor, _: &OpenDocs, window: &mut Window, cx: &mu
};
if docs_urls.is_empty() {
- log::debug!(
- "Empty docs urls for position {:?}",
- trigger_anchor.text_anchor
- );
+ log::debug!("Empty docs urls for position {:?}", trigger_anchor);
return Ok(());
}
@@ -322,16 +316,18 @@ fn cancel_flycheck_action(
let Some(project) = &editor.project else {
return;
};
+ let multibuffer_snapshot = editor
+ .buffer
+ .read_with(cx, |buffer, cx| buffer.snapshot(cx));
let buffer_id = editor
.selections
.disjoint_anchors_arc()
.iter()
.find_map(|selection| {
- let buffer_id = selection
- .start
- .text_anchor
- .buffer_id
- .or(selection.end.text_anchor.buffer_id)?;
+ let buffer_id = multibuffer_snapshot
+ .anchor_to_buffer_anchor(selection.start)?
+ .0
+ .buffer_id;
let project = project.read(cx);
let entry_id = project
.buffer_for_id(buffer_id, cx)?
@@ -351,16 +347,18 @@ fn run_flycheck_action(
let Some(project) = &editor.project else {
return;
};
+ let multibuffer_snapshot = editor
+ .buffer
+ .read_with(cx, |buffer, cx| buffer.snapshot(cx));
let buffer_id = editor
.selections
.disjoint_anchors_arc()
.iter()
.find_map(|selection| {
- let buffer_id = selection
- .start
- .text_anchor
- .buffer_id
- .or(selection.end.text_anchor.buffer_id)?;
+ let buffer_id = multibuffer_snapshot
+ .anchor_to_buffer_anchor(selection.head())?
+ .0
+ .buffer_id;
let project = project.read(cx);
let entry_id = project
.buffer_for_id(buffer_id, cx)?
@@ -380,16 +378,18 @@ fn clear_flycheck_action(
let Some(project) = &editor.project else {
return;
};
+ let multibuffer_snapshot = editor
+ .buffer
+ .read_with(cx, |buffer, cx| buffer.snapshot(cx));
let buffer_id = editor
.selections
.disjoint_anchors_arc()
.iter()
.find_map(|selection| {
- let buffer_id = selection
- .start
- .text_anchor
- .buffer_id
- .or(selection.end.text_anchor.buffer_id)?;
+ let buffer_id = multibuffer_snapshot
+ .anchor_to_buffer_anchor(selection.head())?
+ .0
+ .buffer_id;
let project = project.read(cx);
let entry_id = project
.buffer_for_id(buffer_id, cx)?
@@ -44,13 +44,13 @@ impl ScrollAnchor {
pub(super) fn new() -> Self {
Self {
offset: gpui::Point::default(),
- anchor: Anchor::min(),
+ anchor: Anchor::Min,
}
}
pub fn scroll_position(&self, snapshot: &DisplaySnapshot) -> gpui::Point<ScrollOffset> {
self.offset.apply_along(Axis::Vertical, |offset| {
- if self.anchor == Anchor::min() {
+ if self.anchor == Anchor::Min {
0.
} else {
let scroll_top = self.anchor.to_display_point(snapshot).row().as_f64();
@@ -78,7 +78,7 @@ impl Editor {
let selection_head = self.selections.newest_display(&display_snapshot).head();
let sticky_headers_len = if EditorSettings::get_global(cx).sticky_scroll.enabled
- && let Some((_, _, buffer_snapshot)) = display_snapshot.buffer_snapshot().as_singleton()
+ && let Some(buffer_snapshot) = display_snapshot.buffer_snapshot().as_singleton()
{
let select_head_point =
rope::Point::new(selection_head.to_point(&display_snapshot).row, 0);
@@ -4,7 +4,6 @@ use std::{
sync::Arc,
};
-use collections::HashMap;
use gpui::Pixels;
use itertools::Itertools as _;
use language::{Bias, Point, PointUtf16, Selection, SelectionGoal};
@@ -12,7 +11,7 @@ use multi_buffer::{MultiBufferDimension, MultiBufferOffset};
use util::post_inc;
use crate::{
- Anchor, DisplayPoint, DisplayRow, ExcerptId, MultiBufferSnapshot, SelectMode, ToOffset,
+ Anchor, DisplayPoint, DisplayRow, MultiBufferSnapshot, SelectMode, ToOffset,
display_map::{DisplaySnapshot, ToDisplayPoint},
movement::TextLayoutDetails,
};
@@ -45,8 +44,8 @@ impl SelectionsCollection {
pending: Some(PendingSelection {
selection: Selection {
id: 0,
- start: Anchor::min(),
- end: Anchor::min(),
+ start: Anchor::Min,
+ end: Anchor::Min,
reversed: false,
goal: SelectionGoal::None,
},
@@ -547,13 +546,11 @@ impl SelectionsCollection {
);
assert!(
snapshot.can_resolve(&selection.start),
- "disjoint selection start is not resolvable for the given snapshot:\n{selection:?}, {excerpt:?}",
- excerpt = snapshot.buffer_for_excerpt(selection.start.excerpt_id).map(|snapshot| snapshot.remote_id()),
+ "disjoint selection start is not resolvable for the given snapshot:\n{selection:?}",
);
assert!(
snapshot.can_resolve(&selection.end),
- "disjoint selection end is not resolvable for the given snapshot: {selection:?}, {excerpt:?}",
- excerpt = snapshot.buffer_for_excerpt(selection.end.excerpt_id).map(|snapshot| snapshot.remote_id()),
+ "disjoint selection start is not resolvable for the given snapshot:\n{selection:?}",
);
});
assert!(
@@ -572,17 +569,11 @@ impl SelectionsCollection {
);
assert!(
snapshot.can_resolve(&selection.start),
- "pending selection start is not resolvable for the given snapshot: {pending:?}, {excerpt:?}",
- excerpt = snapshot
- .buffer_for_excerpt(selection.start.excerpt_id)
- .map(|snapshot| snapshot.remote_id()),
+ "pending selection start is not resolvable for the given snapshot: {pending:?}",
);
assert!(
snapshot.can_resolve(&selection.end),
- "pending selection end is not resolvable for the given snapshot: {pending:?}, {excerpt:?}",
- excerpt = snapshot
- .buffer_for_excerpt(selection.end.excerpt_id)
- .map(|snapshot| snapshot.remote_id()),
+ "pending selection end is not resolvable for the given snapshot: {pending:?}",
);
}
}
@@ -665,10 +656,10 @@ impl<'snap, 'a> MutableSelectionsCollection<'snap, 'a> {
self.disjoint
.iter()
.filter(|selection| {
- if let Some(selection_buffer_id) =
- self.snapshot.buffer_id_for_anchor(selection.start)
+ if let Some((selection_buffer_anchor, _)) =
+ self.snapshot.anchor_to_buffer_anchor(selection.start)
{
- let should_remove = selection_buffer_id == buffer_id;
+ let should_remove = selection_buffer_anchor.buffer_id == buffer_id;
changed |= should_remove;
!should_remove
} else {
@@ -683,10 +674,8 @@ impl<'snap, 'a> MutableSelectionsCollection<'snap, 'a> {
let buffer_snapshot = self.snapshot.buffer_snapshot();
let anchor = buffer_snapshot
.excerpts()
- .find(|(_, buffer, _)| buffer.remote_id() == buffer_id)
- .and_then(|(excerpt_id, _, range)| {
- buffer_snapshot.anchor_in_excerpt(excerpt_id, range.context.start)
- })
+ .find(|excerpt| excerpt.context.start.buffer_id == buffer_id)
+ .and_then(|excerpt| buffer_snapshot.anchor_in_excerpt(excerpt.context.start))
.unwrap_or_else(|| self.snapshot.anchor_before(MultiBufferOffset(0)));
self.collection.disjoint = Arc::from([Selection {
id: post_inc(&mut self.collection.next_selection_id),
@@ -1077,80 +1066,6 @@ impl<'snap, 'a> MutableSelectionsCollection<'snap, 'a> {
self.selections_changed = true;
self.pending.as_mut().map(|pending| &mut pending.selection)
}
-
- /// Compute new ranges for any selections that were located in excerpts that have
- /// since been removed.
- ///
- /// Returns a `HashMap` indicating which selections whose former head position
- /// was no longer present. The keys of the map are selection ids. The values are
- /// the id of the new excerpt where the head of the selection has been moved.
- pub fn refresh(&mut self) -> HashMap<usize, ExcerptId> {
- let mut pending = self.collection.pending.take();
- let mut selections_with_lost_position = HashMap::default();
-
- let anchors_with_status = {
- let disjoint_anchors = self
- .disjoint
- .iter()
- .flat_map(|selection| [&selection.start, &selection.end]);
- self.snapshot.refresh_anchors(disjoint_anchors)
- };
- let adjusted_disjoint: Vec<_> = anchors_with_status
- .chunks(2)
- .map(|selection_anchors| {
- let (anchor_ix, start, kept_start) = selection_anchors[0];
- let (_, end, kept_end) = selection_anchors[1];
- let selection = &self.disjoint[anchor_ix / 2];
- let kept_head = if selection.reversed {
- kept_start
- } else {
- kept_end
- };
- if !kept_head {
- selections_with_lost_position.insert(selection.id, selection.head().excerpt_id);
- }
-
- Selection {
- id: selection.id,
- start,
- end,
- reversed: selection.reversed,
- goal: selection.goal,
- }
- })
- .collect();
-
- if !adjusted_disjoint.is_empty() {
- let map = self.display_snapshot();
- let resolved_selections =
- resolve_selections_wrapping_blocks(adjusted_disjoint.iter(), &map).collect();
- self.select::<MultiBufferOffset>(resolved_selections);
- }
-
- if let Some(pending) = pending.as_mut() {
- let anchors = self
- .snapshot
- .refresh_anchors([&pending.selection.start, &pending.selection.end]);
- let (_, start, kept_start) = anchors[0];
- let (_, end, kept_end) = anchors[1];
- let kept_head = if pending.selection.reversed {
- kept_start
- } else {
- kept_end
- };
- if !kept_head {
- selections_with_lost_position
- .insert(pending.selection.id, pending.selection.head().excerpt_id);
- }
-
- pending.selection.start = start;
- pending.selection.end = end;
- }
- self.collection.pending = pending;
- self.selections_changed = true;
-
- selections_with_lost_position
- }
}
impl Deref for MutableSelectionsCollection<'_, '_> {
@@ -148,9 +148,9 @@ impl Editor {
};
let buffers_to_query = self
- .visible_excerpts(true, cx)
- .into_values()
- .map(|(buffer, ..)| buffer)
+ .visible_buffers(cx)
+ .into_iter()
+ .filter(|buffer| self.is_lsp_relevant(buffer.read(cx).file(), cx))
.chain(buffer_id.and_then(|buffer_id| self.buffer.read(cx).buffer(buffer_id)))
.filter_map(|editor_buffer| {
let editor_buffer_id = editor_buffer.read(cx).remote_id();
@@ -365,11 +365,20 @@ fn convert_token(
modifiers: u32,
) -> Option<HighlightStyle> {
let rules = stylizer.rules_for_token(token_type)?;
- let matching = rules.iter().filter(|rule| {
- rule.token_modifiers
- .iter()
- .all(|m| stylizer.has_modifier(modifiers, m))
- });
+ let matching: Vec<_> = rules
+ .iter()
+ .filter(|rule| {
+ rule.token_modifiers
+ .iter()
+ .all(|m| stylizer.has_modifier(modifiers, m))
+ })
+ .collect();
+
+ if let Some(rule) = matching.last() {
+ if rule.no_style_defined() {
+ return None;
+ }
+ }
let mut highlight = HighlightStyle::default();
let mut empty = true;
@@ -463,7 +472,9 @@ mod tests {
};
use futures::StreamExt as _;
- use gpui::{AppContext as _, Entity, Focusable as _, HighlightStyle, TestAppContext};
+ use gpui::{
+ AppContext as _, Entity, Focusable as _, HighlightStyle, TestAppContext, UpdateGlobal as _,
+ };
use language::{Language, LanguageConfig, LanguageMatcher};
use languages::FakeLspAdapter;
use multi_buffer::{
@@ -472,7 +483,10 @@ mod tests {
use project::Project;
use rope::Point;
use serde_json::json;
- use settings::{LanguageSettingsContent, SemanticTokenRules, SemanticTokens, SettingsStore};
+ use settings::{
+ GlobalLspSettingsContent, LanguageSettingsContent, SemanticTokenRule, SemanticTokenRules,
+ SemanticTokens, SettingsStore,
+ };
use workspace::{MultiWorkspace, WorkspaceHandle as _};
use crate::{
@@ -1214,11 +1228,19 @@ mod tests {
);
// Get the excerpt id for the TOML excerpt and expand it down by 2 lines.
- let toml_excerpt_id =
- editor.read_with(cx, |editor, cx| editor.buffer().read(cx).excerpt_ids()[0]);
+ let toml_anchor = editor.read_with(cx, |editor, cx| {
+ editor
+ .buffer()
+ .read(cx)
+ .snapshot(cx)
+ .anchor_in_excerpt(text::Anchor::min_for_buffer(
+ toml_buffer.read(cx).remote_id(),
+ ))
+ .unwrap()
+ });
editor.update_in(cx, |editor, _, cx| {
editor.buffer().update(cx, |buffer, cx| {
- buffer.expand_excerpts([toml_excerpt_id], 2, ExpandExcerptDirection::Down, cx);
+ buffer.expand_excerpts([toml_anchor], 2, ExpandExcerptDirection::Down, cx);
});
});
@@ -1816,6 +1838,256 @@ mod tests {
);
}
+ #[gpui::test]
+ async fn test_semantic_token_disabling_with_empty_rule(cx: &mut TestAppContext) {
+ init_test(cx, |_| {});
+ update_test_language_settings(cx, &|s| {
+ s.languages.0.insert(
+ "Rust".into(),
+ LanguageSettingsContent {
+ semantic_tokens: Some(SemanticTokens::Full),
+ ..Default::default()
+ },
+ );
+ });
+
+ let mut cx = EditorLspTestContext::new_rust(
+ lsp::ServerCapabilities {
+ semantic_tokens_provider: Some(
+ lsp::SemanticTokensServerCapabilities::SemanticTokensOptions(
+ lsp::SemanticTokensOptions {
+ legend: lsp::SemanticTokensLegend {
+ token_types: vec!["function".into()],
+ token_modifiers: vec![],
+ },
+ full: Some(lsp::SemanticTokensFullOptions::Delta { delta: None }),
+ ..Default::default()
+ },
+ ),
+ ),
+ ..Default::default()
+ },
+ cx,
+ )
+ .await;
+
+ let mut full_request = cx
+ .set_request_handler::<lsp::request::SemanticTokensFullRequest, _, _>(
+ move |_, _, _| async move {
+ Ok(Some(lsp::SemanticTokensResult::Tokens(
+ lsp::SemanticTokens {
+ data: vec![0, 3, 4, 0, 0],
+ result_id: None,
+ },
+ )))
+ },
+ );
+
+ // Verify it highlights by default
+ cx.set_state("ˇfn main() {}");
+ full_request.next().await;
+ cx.run_until_parked();
+ assert_eq!(extract_semantic_highlights(&cx.editor, &cx).len(), 1);
+
+ // Apply EMPTY rule to disable it
+ cx.update(|_, cx| {
+ SettingsStore::update_global(cx, |store, cx| {
+ store.update_user_settings(cx, |settings| {
+ settings.global_lsp_settings = Some(GlobalLspSettingsContent {
+ semantic_token_rules: Some(SemanticTokenRules {
+ rules: vec![SemanticTokenRule {
+ token_type: Some("function".to_string()),
+ ..Default::default()
+ }],
+ }),
+ ..Default::default()
+ });
+ });
+ });
+ });
+
+ cx.set_state("ˇfn main() { }");
+ full_request.next().await;
+ cx.run_until_parked();
+
+ assert!(
+ extract_semantic_highlights(&cx.editor, &cx).is_empty(),
+ "Highlighting should be disabled by empty style setting"
+ );
+ }
+
+ #[gpui::test]
+ async fn test_semantic_token_broad_rule_disables_specific_token(cx: &mut TestAppContext) {
+ init_test(cx, |_| {});
+ update_test_language_settings(cx, &|s| {
+ s.languages.0.insert(
+ "Rust".into(),
+ LanguageSettingsContent {
+ semantic_tokens: Some(SemanticTokens::Full),
+ ..Default::default()
+ },
+ );
+ });
+
+ let mut cx = EditorLspTestContext::new_rust(
+ lsp::ServerCapabilities {
+ semantic_tokens_provider: Some(
+ lsp::SemanticTokensServerCapabilities::SemanticTokensOptions(
+ lsp::SemanticTokensOptions {
+ legend: lsp::SemanticTokensLegend {
+ token_types: vec!["comment".into()],
+ token_modifiers: vec!["documentation".into()],
+ },
+ full: Some(lsp::SemanticTokensFullOptions::Delta { delta: None }),
+ ..Default::default()
+ },
+ ),
+ ),
+ ..Default::default()
+ },
+ cx,
+ )
+ .await;
+
+ let mut full_request = cx
+ .set_request_handler::<lsp::request::SemanticTokensFullRequest, _, _>(
+ move |_, _, _| async move {
+ Ok(Some(lsp::SemanticTokensResult::Tokens(
+ lsp::SemanticTokens {
+ data: vec![0, 0, 5, 0, 1], // comment [documentation]
+ result_id: None,
+ },
+ )))
+ },
+ );
+
+ cx.set_state("ˇ/// d\n");
+ full_request.next().await;
+ cx.run_until_parked();
+ assert_eq!(
+ extract_semantic_highlights(&cx.editor, &cx).len(),
+ 1,
+ "Documentation comment should be highlighted"
+ );
+
+ // Apply a BROAD empty rule for "comment" (no modifiers)
+ cx.update(|_, cx| {
+ SettingsStore::update_global(cx, |store, cx| {
+ store.update_user_settings(cx, |settings| {
+ settings.global_lsp_settings = Some(GlobalLspSettingsContent {
+ semantic_token_rules: Some(SemanticTokenRules {
+ rules: vec![SemanticTokenRule {
+ token_type: Some("comment".to_string()),
+ ..Default::default()
+ }],
+ }),
+ ..Default::default()
+ });
+ });
+ });
+ });
+
+ cx.set_state("ˇ/// d\n");
+ full_request.next().await;
+ cx.run_until_parked();
+
+ assert!(
+ extract_semantic_highlights(&cx.editor, &cx).is_empty(),
+ "Broad empty rule should disable specific documentation comment"
+ );
+ }
+
+ #[gpui::test]
+ async fn test_semantic_token_specific_rule_does_not_disable_broad_token(
+ cx: &mut TestAppContext,
+ ) {
+ use gpui::UpdateGlobal as _;
+ use settings::{GlobalLspSettingsContent, SemanticTokenRule};
+
+ init_test(cx, |_| {});
+ update_test_language_settings(cx, &|s| {
+ s.languages.0.insert(
+ "Rust".into(),
+ LanguageSettingsContent {
+ semantic_tokens: Some(SemanticTokens::Full),
+ ..Default::default()
+ },
+ );
+ });
+
+ let mut cx = EditorLspTestContext::new_rust(
+ lsp::ServerCapabilities {
+ semantic_tokens_provider: Some(
+ lsp::SemanticTokensServerCapabilities::SemanticTokensOptions(
+ lsp::SemanticTokensOptions {
+ legend: lsp::SemanticTokensLegend {
+ token_types: vec!["comment".into()],
+ token_modifiers: vec!["documentation".into()],
+ },
+ full: Some(lsp::SemanticTokensFullOptions::Delta { delta: None }),
+ ..Default::default()
+ },
+ ),
+ ),
+ ..Default::default()
+ },
+ cx,
+ )
+ .await;
+
+ let mut full_request = cx
+ .set_request_handler::<lsp::request::SemanticTokensFullRequest, _, _>(
+ move |_, _, _| async move {
+ Ok(Some(lsp::SemanticTokensResult::Tokens(
+ lsp::SemanticTokens {
+ data: vec![
+ 0, 0, 5, 0, 1, // comment [documentation]
+ 1, 0, 5, 0, 0, // normal comment
+ ],
+ result_id: None,
+ },
+ )))
+ },
+ );
+
+ cx.set_state("ˇ/// d\n// n\n");
+ full_request.next().await;
+ cx.run_until_parked();
+ assert_eq!(
+ extract_semantic_highlights(&cx.editor, &cx).len(),
+ 2,
+ "Both documentation and normal comments should be highlighted initially"
+ );
+
+ // Apply a SPECIFIC empty rule for documentation only
+ cx.update(|_, cx| {
+ SettingsStore::update_global(cx, |store, cx| {
+ store.update_user_settings(cx, |settings| {
+ settings.global_lsp_settings = Some(GlobalLspSettingsContent {
+ semantic_token_rules: Some(SemanticTokenRules {
+ rules: vec![SemanticTokenRule {
+ token_type: Some("comment".to_string()),
+ token_modifiers: vec!["documentation".to_string()],
+ ..Default::default()
+ }],
+ }),
+ ..Default::default()
+ });
+ });
+ });
+ });
+
+ cx.set_state("ˇ/// d\n// n\n");
+ full_request.next().await;
+ cx.run_until_parked();
+
+ assert_eq!(
+ extract_semantic_highlights(&cx.editor, &cx).len(),
+ 1,
+ "Normal comment should still be highlighted (matched by default rule)"
+ );
+ }
+
fn extract_semantic_highlight_styles(
editor: &Entity<Editor>,
cx: &TestAppContext,
@@ -1,5 +1,5 @@
use std::{
- ops::{Bound, Range, RangeInclusive},
+ ops::{Range, RangeInclusive},
sync::Arc,
};
@@ -13,7 +13,7 @@ use gpui::{
use itertools::Itertools;
use language::{Buffer, Capability, HighlightedText};
use multi_buffer::{
- Anchor, BufferOffset, ExcerptId, ExcerptRange, ExpandExcerptDirection, MultiBuffer,
+ Anchor, AnchorRangeExt as _, BufferOffset, ExcerptRange, ExpandExcerptDirection, MultiBuffer,
MultiBufferDiffHunk, MultiBufferPoint, MultiBufferSnapshot, PathKey,
};
use project::Project;
@@ -44,13 +44,11 @@ use crate::{
use zed_actions::assistant::InlineAssist;
pub(crate) fn convert_lhs_rows_to_rhs(
- lhs_excerpt_to_rhs_excerpt: &HashMap<ExcerptId, ExcerptId>,
rhs_snapshot: &MultiBufferSnapshot,
lhs_snapshot: &MultiBufferSnapshot,
- lhs_bounds: (Bound<MultiBufferPoint>, Bound<MultiBufferPoint>),
+ lhs_bounds: Range<MultiBufferPoint>,
) -> Vec<CompanionExcerptPatch> {
patches_for_range(
- lhs_excerpt_to_rhs_excerpt,
lhs_snapshot,
rhs_snapshot,
lhs_bounds,
@@ -59,13 +57,11 @@ pub(crate) fn convert_lhs_rows_to_rhs(
}
pub(crate) fn convert_rhs_rows_to_lhs(
- rhs_excerpt_to_lhs_excerpt: &HashMap<ExcerptId, ExcerptId>,
lhs_snapshot: &MultiBufferSnapshot,
rhs_snapshot: &MultiBufferSnapshot,
- rhs_bounds: (Bound<MultiBufferPoint>, Bound<MultiBufferPoint>),
+ rhs_bounds: Range<MultiBufferPoint>,
) -> Vec<CompanionExcerptPatch> {
patches_for_range(
- rhs_excerpt_to_lhs_excerpt,
rhs_snapshot,
lhs_snapshot,
rhs_bounds,
@@ -73,6 +69,21 @@ pub(crate) fn convert_rhs_rows_to_lhs(
)
}
+fn rhs_range_to_base_text_range(
+ rhs_range: &Range<Point>,
+ diff_snapshot: &BufferDiffSnapshot,
+ rhs_buffer_snapshot: &text::BufferSnapshot,
+) -> Range<Point> {
+ let start = diff_snapshot
+ .buffer_point_to_base_text_range(Point::new(rhs_range.start.row, 0), rhs_buffer_snapshot)
+ .start;
+ let end = diff_snapshot
+ .buffer_point_to_base_text_range(Point::new(rhs_range.end.row, 0), rhs_buffer_snapshot)
+ .end;
+ let end_column = diff_snapshot.base_text().line_len(end.row);
+ Point::new(start.row, 0)..Point::new(end.row, end_column)
+}
+
fn translate_lhs_selections_to_rhs(
selections_by_buffer: &HashMap<BufferId, (Vec<Range<BufferOffset>>, Option<u32>)>,
splittable: &SplittableEditor,
@@ -168,22 +179,18 @@ fn translate_lhs_hunks_to_rhs(
}
fn patches_for_range<F>(
- excerpt_map: &HashMap<ExcerptId, ExcerptId>,
source_snapshot: &MultiBufferSnapshot,
target_snapshot: &MultiBufferSnapshot,
- source_bounds: (Bound<MultiBufferPoint>, Bound<MultiBufferPoint>),
+ source_bounds: Range<MultiBufferPoint>,
translate_fn: F,
) -> Vec<CompanionExcerptPatch>
where
F: Fn(&BufferDiffSnapshot, RangeInclusive<Point>, &text::BufferSnapshot) -> Patch<Point>,
{
- struct PendingExcerpt<'a> {
- source_excerpt_id: ExcerptId,
- target_excerpt_id: ExcerptId,
- source_buffer: &'a text::BufferSnapshot,
- target_buffer: &'a text::BufferSnapshot,
+ struct PendingExcerpt {
+ source_buffer_snapshot: language::BufferSnapshot,
+ source_excerpt_range: ExcerptRange<text::Anchor>,
buffer_point_range: Range<Point>,
- source_context_range: Range<Point>,
}
let mut result = Vec::new();
@@ -201,41 +208,55 @@ where
};
let diff = source_snapshot
- .diff_for_buffer_id(first.source_buffer.remote_id())
+ .diff_for_buffer_id(first.source_buffer_snapshot.remote_id())
.expect("buffer with no diff when creating patches");
- let rhs_buffer = if first.source_buffer.remote_id() == diff.base_text().remote_id() {
- first.target_buffer
+ let source_is_lhs =
+ first.source_buffer_snapshot.remote_id() == diff.base_text().remote_id();
+ let target_buffer_id = if source_is_lhs {
+ diff.buffer_id()
} else {
- first.source_buffer
+ diff.base_text().remote_id()
+ };
+ let target_buffer = target_snapshot
+ .buffer_for_id(target_buffer_id)
+ .expect("missing corresponding buffer");
+ let rhs_buffer = if source_is_lhs {
+ target_buffer
+ } else {
+ &first.source_buffer_snapshot
};
let patch = translate_fn(diff, union_start..=union_end, rhs_buffer);
for excerpt in pending.drain(..) {
+ let target_position = patch.old_to_new(excerpt.buffer_point_range.start);
+ let target_position = target_buffer.anchor_before(target_position);
+ let Some(target_position) = target_snapshot.anchor_in_excerpt(target_position) else {
+ continue;
+ };
+ let Some((target_buffer_snapshot, target_excerpt_range)) =
+ target_snapshot.excerpt_containing(target_position..target_position)
+ else {
+ continue;
+ };
+
result.push(patch_for_excerpt(
source_snapshot,
target_snapshot,
- excerpt.source_excerpt_id,
- excerpt.target_excerpt_id,
- excerpt.target_buffer,
- excerpt.source_context_range,
+ &excerpt.source_buffer_snapshot,
+ target_buffer_snapshot,
+ excerpt.source_excerpt_range,
+ target_excerpt_range,
&patch,
excerpt.buffer_point_range,
));
}
};
- for (source_buffer, buffer_offset_range, source_excerpt_id, source_context_range) in
- source_snapshot.range_to_buffer_ranges_with_context(source_bounds)
+ for (buffer_snapshot, source_range, source_excerpt_range) in
+ source_snapshot.range_to_buffer_ranges(source_bounds)
{
- let Some(target_excerpt_id) = excerpt_map.get(&source_excerpt_id).copied() else {
- continue;
- };
- let Some(target_buffer) = target_snapshot.buffer_for_excerpt(target_excerpt_id) else {
- continue;
- };
-
- let buffer_id = source_buffer.remote_id();
+ let buffer_id = buffer_snapshot.remote_id();
if current_buffer_id != Some(buffer_id) {
if let (Some(start), Some(end)) = (union_context_start.take(), union_context_end.take())
@@ -245,8 +266,8 @@ where
current_buffer_id = Some(buffer_id);
}
- let buffer_point_range = buffer_offset_range.to_point(source_buffer);
- let source_context_range = source_context_range.to_point(source_buffer);
+ let buffer_point_range = source_range.to_point(&buffer_snapshot);
+ let source_context_range = source_excerpt_range.context.to_point(&buffer_snapshot);
union_context_start = Some(union_context_start.map_or(source_context_range.start, |s| {
s.min(source_context_range.start)
@@ -256,12 +277,9 @@ where
}));
pending_excerpts.push(PendingExcerpt {
- source_excerpt_id,
- target_excerpt_id,
- source_buffer,
- target_buffer,
+ source_buffer_snapshot: buffer_snapshot,
+ source_excerpt_range,
buffer_point_range,
- source_context_range,
});
}
@@ -275,55 +293,60 @@ where
fn patch_for_excerpt(
source_snapshot: &MultiBufferSnapshot,
target_snapshot: &MultiBufferSnapshot,
- source_excerpt_id: ExcerptId,
- target_excerpt_id: ExcerptId,
- target_buffer: &text::BufferSnapshot,
- source_context_range: Range<Point>,
+ source_buffer_snapshot: &language::BufferSnapshot,
+ target_buffer_snapshot: &language::BufferSnapshot,
+ source_excerpt_range: ExcerptRange<text::Anchor>,
+ target_excerpt_range: ExcerptRange<text::Anchor>,
patch: &Patch<Point>,
source_edited_range: Range<Point>,
) -> CompanionExcerptPatch {
- let source_multibuffer_range = source_snapshot
- .range_for_excerpt(source_excerpt_id)
- .expect("no excerpt for source id when creating patch");
- let source_excerpt_start_in_multibuffer = source_multibuffer_range.start;
- let source_excerpt_start_in_buffer = source_context_range.start;
- let source_excerpt_end_in_buffer = source_context_range.end;
- let target_multibuffer_range = target_snapshot
- .range_for_excerpt(target_excerpt_id)
- .expect("no excerpt for target id when creating patch");
- let target_excerpt_start_in_multibuffer = target_multibuffer_range.start;
- let target_context_range = target_snapshot
- .context_range_for_excerpt(target_excerpt_id)
- .expect("no range for target id when creating patch");
- let target_excerpt_start_in_buffer = target_context_range.start.to_point(&target_buffer);
- let target_excerpt_end_in_buffer = target_context_range.end.to_point(&target_buffer);
+ let source_buffer_range = source_excerpt_range
+ .context
+ .to_point(source_buffer_snapshot);
+ let source_multibuffer_range = (source_snapshot
+ .anchor_in_buffer(source_excerpt_range.context.start)
+ .expect("buffer should exist in multibuffer")
+ ..source_snapshot
+ .anchor_in_buffer(source_excerpt_range.context.end)
+ .expect("buffer should exist in multibuffer"))
+ .to_point(source_snapshot);
+ let target_buffer_range = target_excerpt_range
+ .context
+ .to_point(target_buffer_snapshot);
+ let target_multibuffer_range = (target_snapshot
+ .anchor_in_buffer(target_excerpt_range.context.start)
+ .expect("buffer should exist in multibuffer")
+ ..target_snapshot
+ .anchor_in_buffer(target_excerpt_range.context.end)
+ .expect("buffer should exist in multibuffer"))
+ .to_point(target_snapshot);
let edits = patch
.edits()
.iter()
- .skip_while(|edit| edit.old.end < source_excerpt_start_in_buffer)
- .take_while(|edit| edit.old.start <= source_excerpt_end_in_buffer)
+ .skip_while(|edit| edit.old.end < source_buffer_range.start)
+ .take_while(|edit| edit.old.start <= source_buffer_range.end)
.map(|edit| {
- let clamped_source_start = edit.old.start.max(source_excerpt_start_in_buffer);
- let clamped_source_end = edit.old.end.min(source_excerpt_end_in_buffer);
- let source_multibuffer_start = source_excerpt_start_in_multibuffer
- + (clamped_source_start - source_excerpt_start_in_buffer);
- let source_multibuffer_end = source_excerpt_start_in_multibuffer
- + (clamped_source_end - source_excerpt_start_in_buffer);
+ let clamped_source_start = edit.old.start.max(source_buffer_range.start);
+ let clamped_source_end = edit.old.end.min(source_buffer_range.end);
+ let source_multibuffer_start =
+ source_multibuffer_range.start + (clamped_source_start - source_buffer_range.start);
+ let source_multibuffer_end =
+ source_multibuffer_range.start + (clamped_source_end - source_buffer_range.start);
let clamped_target_start = edit
.new
.start
- .max(target_excerpt_start_in_buffer)
- .min(target_excerpt_end_in_buffer);
+ .max(target_buffer_range.start)
+ .min(target_buffer_range.end);
let clamped_target_end = edit
.new
.end
- .max(target_excerpt_start_in_buffer)
- .min(target_excerpt_end_in_buffer);
- let target_multibuffer_start = target_excerpt_start_in_multibuffer
- + (clamped_target_start - target_excerpt_start_in_buffer);
- let target_multibuffer_end = target_excerpt_start_in_multibuffer
- + (clamped_target_end - target_excerpt_start_in_buffer);
+ .max(target_buffer_range.start)
+ .min(target_buffer_range.end);
+ let target_multibuffer_start =
+ target_multibuffer_range.start + (clamped_target_start - target_buffer_range.start);
+ let target_multibuffer_end =
+ target_multibuffer_range.start + (clamped_target_end - target_buffer_range.start);
text::Edit {
old: source_multibuffer_start..source_multibuffer_end,
new: target_multibuffer_start..target_multibuffer_end,
@@ -331,8 +354,8 @@ fn patch_for_excerpt(
});
let edits = [text::Edit {
- old: source_excerpt_start_in_multibuffer..source_excerpt_start_in_multibuffer,
- new: target_excerpt_start_in_multibuffer..target_excerpt_start_in_multibuffer,
+ old: source_multibuffer_range.start..source_multibuffer_range.start,
+ new: target_multibuffer_range.start..target_multibuffer_range.start,
}]
.into_iter()
.chain(edits);
@@ -349,21 +372,20 @@ fn patch_for_excerpt(
merged_edits.push(edit);
}
- let edited_range = source_excerpt_start_in_multibuffer
- + (source_edited_range.start - source_excerpt_start_in_buffer)
- ..source_excerpt_start_in_multibuffer
- + (source_edited_range.end - source_excerpt_start_in_buffer);
+ let edited_range = source_multibuffer_range.start
+ + (source_edited_range.start - source_buffer_range.start)
+ ..source_multibuffer_range.start + (source_edited_range.end - source_buffer_range.start);
- let source_excerpt_end = source_excerpt_start_in_multibuffer
- + (source_excerpt_end_in_buffer - source_excerpt_start_in_buffer);
- let target_excerpt_end = target_excerpt_start_in_multibuffer
- + (target_excerpt_end_in_buffer - target_excerpt_start_in_buffer);
+ let source_excerpt_end =
+ source_multibuffer_range.start + (source_buffer_range.end - source_buffer_range.start);
+ let target_excerpt_end =
+ target_multibuffer_range.start + (target_buffer_range.end - target_buffer_range.start);
CompanionExcerptPatch {
patch: Patch::new(merged_edits),
edited_range,
- source_excerpt_range: source_excerpt_start_in_multibuffer..source_excerpt_end,
- target_excerpt_range: target_excerpt_start_in_multibuffer..target_excerpt_end,
+ source_excerpt_range: source_multibuffer_range.start..source_excerpt_end,
+ target_excerpt_range: target_multibuffer_range.start..target_excerpt_end,
}
}
@@ -390,6 +412,7 @@ pub struct SplittableEditor {
struct LhsEditor {
multibuffer: Entity<MultiBuffer>,
editor: Entity<Editor>,
+ companion: Entity<Companion>,
was_last_focused: bool,
_subscriptions: Vec<Subscription>,
}
@@ -470,11 +493,16 @@ impl SplittableEditor {
&rhs_editor,
|this, _, event: &EditorEvent, cx| match event {
EditorEvent::ExpandExcerptsRequested {
- excerpt_ids,
+ excerpt_anchors,
lines,
direction,
} => {
- this.expand_excerpts(excerpt_ids.iter().copied(), *lines, *direction, cx);
+ this.expand_excerpts(
+ excerpt_anchors.iter().copied(),
+ *lines,
+ *direction,
+ cx,
+ );
}
_ => cx.emit(event.clone()),
},
@@ -563,19 +591,31 @@ impl SplittableEditor {
window,
|this, _, event: &EditorEvent, window, cx| match event {
EditorEvent::ExpandExcerptsRequested {
- excerpt_ids,
+ excerpt_anchors,
lines,
direction,
} => {
- if this.lhs.is_some() {
- let rhs_display_map = this.rhs_editor.read(cx).display_map.read(cx);
- let rhs_ids: Vec<_> = excerpt_ids
+ if let Some(lhs) = &this.lhs {
+ let rhs_snapshot = this.rhs_multibuffer.read(cx).snapshot(cx);
+ let lhs_snapshot = lhs.multibuffer.read(cx).snapshot(cx);
+ let rhs_anchors = excerpt_anchors
.iter()
- .filter_map(|id| {
- rhs_display_map.companion_excerpt_to_my_excerpt(*id, cx)
+ .filter_map(|anchor| {
+ let (anchor, lhs_buffer) =
+ lhs_snapshot.anchor_to_buffer_anchor(*anchor)?;
+ let rhs_buffer_id =
+ lhs.companion.read(cx).lhs_to_rhs_buffer(anchor.buffer_id)?;
+ let rhs_buffer = rhs_snapshot.buffer_for_id(rhs_buffer_id)?;
+ let diff = this.rhs_multibuffer.read(cx).diff_for(rhs_buffer_id)?;
+ let diff_snapshot = diff.read(cx).snapshot(cx);
+ let rhs_point = diff_snapshot.base_text_point_to_buffer_point(
+ anchor.to_point(&lhs_buffer),
+ &rhs_buffer,
+ );
+ rhs_snapshot.anchor_in_excerpt(rhs_buffer.anchor_before(rhs_point))
})
- .collect();
- this.expand_excerpts(rhs_ids.into_iter(), *lines, *direction, cx);
+ .collect::<Vec<_>>();
+ this.expand_excerpts(rhs_anchors.into_iter(), *lines, *direction, cx);
}
}
EditorEvent::StageOrUnstageRequested { stage, hunks } => {
@@ -654,15 +694,23 @@ impl SplittableEditor {
}),
);
+ let rhs_display_map = self.rhs_editor.read(cx).display_map.clone();
+ let lhs_display_map = lhs_editor.read(cx).display_map.clone();
+ let rhs_display_map_id = rhs_display_map.entity_id();
+ let companion = cx.new(|_| {
+ Companion::new(
+ rhs_display_map_id,
+ convert_rhs_rows_to_lhs,
+ convert_lhs_rows_to_rhs,
+ )
+ });
let lhs = LhsEditor {
editor: lhs_editor,
multibuffer: lhs_multibuffer,
was_last_focused: false,
+ companion: companion.clone(),
_subscriptions: subscriptions,
};
- let rhs_display_map = self.rhs_editor.read(cx).display_map.clone();
- let lhs_display_map = lhs.editor.read(cx).display_map.clone();
- let rhs_display_map_id = rhs_display_map.entity_id();
self.rhs_editor.update(cx, |editor, cx| {
editor.set_delegate_expand_excerpts(true);
@@ -672,35 +720,21 @@ impl SplittableEditor {
})
});
- let path_diffs: Vec<_> = {
+ let all_paths: Vec<_> = {
let rhs_multibuffer = self.rhs_multibuffer.read(cx);
- rhs_multibuffer
- .paths()
- .filter_map(|path| {
- let excerpt_id = rhs_multibuffer.excerpts_for_path(path).next()?;
- let snapshot = rhs_multibuffer.snapshot(cx);
- let buffer = snapshot.buffer_for_excerpt(excerpt_id)?;
+ let rhs_multibuffer_snapshot = rhs_multibuffer.snapshot(cx);
+ rhs_multibuffer_snapshot
+ .buffers_with_paths()
+ .filter_map(|(buffer, path)| {
let diff = rhs_multibuffer.diff_for(buffer.remote_id())?;
Some((path.clone(), diff))
})
.collect()
};
- let companion = cx.new(|_| {
- Companion::new(
- rhs_display_map_id,
- convert_rhs_rows_to_lhs,
- convert_lhs_rows_to_rhs,
- )
- });
-
self.lhs = Some(lhs);
- let paths_for_sync: Vec<_> = path_diffs
- .into_iter()
- .map(|(path, diff)| (path, vec![], diff))
- .collect();
- self.sync_lhs_for_paths(paths_for_sync, &companion, cx);
+ self.sync_lhs_for_paths(all_paths, &companion, cx);
rhs_display_map.update(cx, |dm, cx| {
dm.set_companion(Some((lhs_display_map, companion.clone())), cx);
@@ -1004,7 +1038,7 @@ impl SplittableEditor {
cx.notify();
}
- pub fn set_excerpts_for_path(
+ pub fn update_excerpts_for_path(
&mut self,
path: PathKey,
buffer: Entity<Buffer>,
@@ -1012,122 +1046,94 @@ impl SplittableEditor {
context_line_count: u32,
diff: Entity<BufferDiff>,
cx: &mut Context<Self>,
- ) -> (Vec<Range<Anchor>>, bool) {
+ ) -> bool {
+ let has_ranges = ranges.clone().into_iter().next().is_some();
let Some(companion) = self.companion(cx) else {
return self.rhs_multibuffer.update(cx, |rhs_multibuffer, cx| {
- let (anchors, added_a_new_excerpt) = rhs_multibuffer.set_excerpts_for_path(
+ let added_a_new_excerpt = rhs_multibuffer.update_excerpts_for_path(
path,
buffer.clone(),
ranges,
context_line_count,
cx,
);
- if !anchors.is_empty()
+ if has_ranges
&& rhs_multibuffer
.diff_for(buffer.read(cx).remote_id())
.is_none_or(|old_diff| old_diff.entity_id() != diff.entity_id())
{
rhs_multibuffer.add_diff(diff, cx);
}
- (anchors, added_a_new_excerpt)
+ added_a_new_excerpt
});
};
- let old_rhs_ids: Vec<ExcerptId> = self
- .rhs_multibuffer
- .read(cx)
- .excerpts_for_path(&path)
- .collect();
-
let result = self.rhs_multibuffer.update(cx, |rhs_multibuffer, cx| {
- let (anchors, added_a_new_excerpt) = rhs_multibuffer.set_excerpts_for_path(
+ let added_a_new_excerpt = rhs_multibuffer.update_excerpts_for_path(
path.clone(),
buffer.clone(),
ranges,
context_line_count,
cx,
);
- if !anchors.is_empty()
+ if has_ranges
&& rhs_multibuffer
.diff_for(buffer.read(cx).remote_id())
.is_none_or(|old_diff| old_diff.entity_id() != diff.entity_id())
{
rhs_multibuffer.add_diff(diff.clone(), cx);
}
- (anchors, added_a_new_excerpt)
+ added_a_new_excerpt
});
- self.sync_lhs_for_paths(vec![(path, old_rhs_ids, diff)], &companion, cx);
+ self.sync_lhs_for_paths(vec![(path, diff)], &companion, cx);
result
}
fn expand_excerpts(
&mut self,
- excerpt_ids: impl Iterator<Item = ExcerptId> + Clone,
+ excerpt_anchors: impl Iterator<Item = Anchor> + Clone,
lines: u32,
direction: ExpandExcerptDirection,
cx: &mut Context<Self>,
) {
let Some(companion) = self.companion(cx) else {
self.rhs_multibuffer.update(cx, |rhs_multibuffer, cx| {
- rhs_multibuffer.expand_excerpts(excerpt_ids, lines, direction, cx);
+ rhs_multibuffer.expand_excerpts(excerpt_anchors, lines, direction, cx);
});
return;
};
- let paths_with_old_ids: Vec<_> = self.rhs_multibuffer.update(cx, |rhs_multibuffer, cx| {
+ let paths: Vec<_> = self.rhs_multibuffer.update(cx, |rhs_multibuffer, cx| {
let snapshot = rhs_multibuffer.snapshot(cx);
- let paths = excerpt_ids
+ let paths = excerpt_anchors
.clone()
- .filter_map(|excerpt_id| {
- let path = rhs_multibuffer.path_for_excerpt(excerpt_id)?;
- let buffer = snapshot.buffer_for_excerpt(excerpt_id)?;
- let diff = rhs_multibuffer.diff_for(buffer.remote_id())?;
- Some((path, diff))
+ .filter_map(|anchor| {
+ let (anchor, _) = snapshot.anchor_to_buffer_anchor(anchor)?;
+ let path = snapshot.path_for_buffer(anchor.buffer_id)?;
+ let diff = rhs_multibuffer.diff_for(anchor.buffer_id)?;
+ Some((path.clone(), diff))
})
.collect::<HashMap<_, _>>()
.into_iter()
- .map(|(path, diff)| {
- let old_ids = rhs_multibuffer.excerpts_for_path(&path).collect();
- (path, old_ids, diff)
- })
.collect();
- rhs_multibuffer.expand_excerpts(excerpt_ids, lines, direction, cx);
+ rhs_multibuffer.expand_excerpts(excerpt_anchors, lines, direction, cx);
paths
});
- self.sync_lhs_for_paths(paths_with_old_ids, &companion, cx);
+ self.sync_lhs_for_paths(paths, &companion, cx);
}
pub fn remove_excerpts_for_path(&mut self, path: PathKey, cx: &mut Context<Self>) {
- let Some(lhs) = &self.lhs else {
- self.rhs_multibuffer.update(cx, |rhs_multibuffer, cx| {
- rhs_multibuffer.remove_excerpts_for_path(path, cx);
- });
- return;
- };
-
- let rhs_excerpt_ids: Vec<ExcerptId> = self
- .rhs_multibuffer
- .read(cx)
- .excerpts_for_path(&path)
- .collect();
- let lhs_excerpt_ids: Vec<ExcerptId> =
- lhs.multibuffer.read(cx).excerpts_for_path(&path).collect();
+ self.rhs_multibuffer.update(cx, |rhs_multibuffer, cx| {
+ rhs_multibuffer.remove_excerpts(path.clone(), cx);
+ });
- let rhs_display_map = self.rhs_editor.read(cx).display_map.clone();
- if let Some(companion) = rhs_display_map.read(cx).companion().cloned() {
- companion.update(cx, |c, _| {
- c.remove_excerpt_mappings(lhs_excerpt_ids, rhs_excerpt_ids);
+ if let Some(lhs) = &self.lhs {
+ lhs.multibuffer.update(cx, |lhs_multibuffer, cx| {
+ lhs_multibuffer.remove_excerpts(path, cx);
});
}
-
- self.rhs_multibuffer.update(cx, |rhs_multibuffer, cx| {
- rhs_multibuffer.remove_excerpts_for_path(path.clone(), cx);
- });
- lhs.multibuffer.update(cx, |lhs_multibuffer, cx| {
- lhs_multibuffer.remove_excerpts_for_path(path, cx);
- });
}
fn search_token(&self) -> SearchToken {
@@ -1151,122 +1157,95 @@ impl SplittableEditor {
fn sync_lhs_for_paths(
&self,
- paths_with_old_rhs_ids: Vec<(PathKey, Vec<ExcerptId>, Entity<BufferDiff>)>,
+ paths: Vec<(PathKey, Entity<BufferDiff>)>,
companion: &Entity<Companion>,
cx: &mut Context<Self>,
) {
let Some(lhs) = &self.lhs else { return };
self.rhs_multibuffer.update(cx, |rhs_multibuffer, cx| {
- for (path, old_rhs_ids, diff) in paths_with_old_rhs_ids {
- let old_lhs_ids: Vec<ExcerptId> =
- lhs.multibuffer.read(cx).excerpts_for_path(&path).collect();
-
- companion.update(cx, |c, _| {
- c.remove_excerpt_mappings(old_lhs_ids, old_rhs_ids);
- });
-
- let rhs_excerpt_ids: Vec<ExcerptId> =
- rhs_multibuffer.excerpts_for_path(&path).collect();
- let Some(excerpt_id) = rhs_excerpt_ids.first().copied() else {
+ for (path, diff) in paths {
+ let main_buffer_id = diff.read(cx).buffer_id;
+ let Some(main_buffer) = rhs_multibuffer.buffer(diff.read(cx).buffer_id) else {
lhs.multibuffer.update(cx, |lhs_multibuffer, lhs_cx| {
- lhs_multibuffer.remove_excerpts_for_path(path, lhs_cx);
+ lhs_multibuffer.remove_excerpts(path, lhs_cx);
});
continue;
};
- let Some(main_buffer_snapshot) = rhs_multibuffer
- .snapshot(cx)
- .buffer_for_excerpt(excerpt_id)
- .cloned()
- else {
- continue;
- };
- let Some(main_buffer) = rhs_multibuffer.buffer(main_buffer_snapshot.remote_id())
- else {
- continue;
- };
+ let main_buffer_snapshot = main_buffer.read(cx).snapshot();
let base_text_buffer = diff.read(cx).base_text_buffer().clone();
let diff_snapshot = diff.read(cx).snapshot(cx);
let base_text_buffer_snapshot = base_text_buffer.read(cx).snapshot();
- let lhs_ranges: Vec<ExcerptRange<Point>> = rhs_multibuffer
- .excerpts_for_buffer(main_buffer_snapshot.remote_id(), cx)
+ let mut paired_ranges: Vec<(Range<Point>, ExcerptRange<text::Anchor>)> = Vec::new();
+
+ let mut have_excerpt = false;
+ let mut did_merge = false;
+ let rhs_multibuffer_snapshot = rhs_multibuffer.snapshot(cx);
+ for info in rhs_multibuffer_snapshot.excerpts_for_buffer(main_buffer_id) {
+ have_excerpt = true;
+ let rhs_context = info.context.to_point(&main_buffer_snapshot);
+ let lhs_context = rhs_range_to_base_text_range(
+ &rhs_context,
+ &diff_snapshot,
+ &main_buffer_snapshot,
+ );
+
+ if let Some((prev_lhs_context, prev_rhs_range)) = paired_ranges.last_mut()
+ && prev_lhs_context.end >= lhs_context.start
+ {
+ did_merge = true;
+ prev_lhs_context.end = lhs_context.end;
+ prev_rhs_range.context.end = info.context.end;
+ continue;
+ }
+
+ paired_ranges.push((lhs_context, info));
+ }
+
+ let (lhs_ranges, rhs_ranges): (Vec<_>, Vec<_>) = paired_ranges.into_iter().unzip();
+ let lhs_ranges = lhs_ranges
.into_iter()
- .filter(|(id, _, _)| rhs_excerpt_ids.contains(id))
- .map(|(_, _, excerpt_range)| {
- let to_base_text = |range: Range<Point>| {
- let start = diff_snapshot
- .buffer_point_to_base_text_range(
- Point::new(range.start.row, 0),
- &main_buffer_snapshot,
- )
- .start;
- let end = diff_snapshot
- .buffer_point_to_base_text_range(
- Point::new(range.end.row, 0),
- &main_buffer_snapshot,
- )
- .end;
- let end_column = diff_snapshot.base_text().line_len(end.row);
- Point::new(start.row, 0)..Point::new(end.row, end_column)
- };
- let primary = excerpt_range.primary.to_point(&main_buffer_snapshot);
- let context = excerpt_range.context.to_point(&main_buffer_snapshot);
- ExcerptRange {
- primary: to_base_text(primary),
- context: to_base_text(context),
- }
+ .map(|range| {
+ ExcerptRange::new(base_text_buffer_snapshot.anchor_range_outside(range))
})
- .collect();
+ .collect::<Vec<_>>();
- let groups = lhs.multibuffer.update(cx, |lhs_multibuffer, lhs_cx| {
- let lhs_result = lhs_multibuffer.update_path_excerpts(
- path,
+ lhs.multibuffer.update(cx, |lhs_multibuffer, lhs_cx| {
+ lhs_multibuffer.update_path_excerpts(
+ path.clone(),
base_text_buffer,
&base_text_buffer_snapshot,
- lhs_ranges,
+ &lhs_ranges,
lhs_cx,
);
- if !lhs_result.excerpt_ids.is_empty()
+ if have_excerpt
&& lhs_multibuffer
.diff_for(base_text_buffer_snapshot.remote_id())
.is_none_or(|old_diff| old_diff.entity_id() != diff.entity_id())
{
- lhs_multibuffer.add_inverted_diff(diff.clone(), main_buffer, lhs_cx);
- }
-
- let mut groups = Vec::new();
- for (lhs_id, chunk) in &lhs_result
- .excerpt_ids
- .iter()
- .copied()
- .zip(rhs_excerpt_ids)
- .chunk_by(|(lhs_id, _)| *lhs_id)
- {
- groups.push((lhs_id, chunk.map(|(_, rhs_id)| rhs_id).collect::<Vec<_>>()));
+ lhs_multibuffer.add_inverted_diff(
+ diff.clone(),
+ main_buffer.clone(),
+ lhs_cx,
+ );
}
- groups
});
- let pairs = groups
- .into_iter()
- .map(|(lhs_id, rhs_group)| {
- let rhs_id = if rhs_group.len() == 1 {
- rhs_group[0]
- } else {
- rhs_multibuffer.merge_excerpts(&rhs_group, cx)
- };
- (lhs_id, rhs_id)
- })
- .collect::<Vec<_>>();
+ if did_merge {
+ rhs_multibuffer.update_path_excerpts(
+ path,
+ main_buffer,
+ &main_buffer_snapshot,
+ &rhs_ranges,
+ cx,
+ );
+ }
let lhs_buffer_id = diff.read(cx).base_text(cx).remote_id();
let rhs_buffer_id = diff.read(cx).buffer_id;
companion.update(cx, |c, _| {
- for (lhs_id, rhs_id) in pairs {
- c.add_excerpt_mapping(lhs_id, rhs_id);
- }
c.add_buffer_mapping(lhs_buffer_id, rhs_buffer_id);
});
}
@@ -1312,7 +1291,7 @@ impl SplittableEditor {
use crate::display_map::DisplayRow;
self.debug_print(cx);
- self.check_excerpt_mapping_invariants(cx);
+ self.check_excerpt_invariants(quiesced, cx);
let lhs = self.lhs.as_ref().unwrap();
@@ -1362,15 +1341,21 @@ impl SplittableEditor {
let (lhs_point, rhs_point) =
if lhs_hunk.row_range.is_empty() || rhs_hunk.row_range.is_empty() {
+ use multi_buffer::ToPoint as _;
+
let lhs_end = Point::new(lhs_hunk.row_range.end.0, 0);
let rhs_end = Point::new(rhs_hunk.row_range.end.0, 0);
- let lhs_exceeds = lhs_snapshot
- .range_for_excerpt(lhs_hunk.excerpt_id)
- .map_or(false, |range| lhs_end >= range.end);
- let rhs_exceeds = rhs_snapshot
- .range_for_excerpt(rhs_hunk.excerpt_id)
- .map_or(false, |range| rhs_end >= range.end);
+ let lhs_excerpt_end = lhs_snapshot
+ .anchor_in_excerpt(lhs_hunk.excerpt_range.context.end)
+ .unwrap()
+ .to_point(&lhs_snapshot);
+ let lhs_exceeds = lhs_end >= lhs_excerpt_end;
+ let rhs_excerpt_end = rhs_snapshot
+ .anchor_in_excerpt(rhs_hunk.excerpt_range.context.end)
+ .unwrap()
+ .to_point(&rhs_snapshot);
+ let rhs_exceeds = rhs_end >= rhs_excerpt_end;
if lhs_exceeds != rhs_exceeds {
continue;
}
@@ -1664,109 +1649,53 @@ impl SplittableEditor {
eprintln!();
}
- fn check_excerpt_mapping_invariants(&self, cx: &gpui::App) {
- use multi_buffer::{ExcerptId, PathKey};
-
+ fn check_excerpt_invariants(&self, quiesced: bool, cx: &gpui::App) {
let lhs = self.lhs.as_ref().expect("should have lhs editor");
- let rhs_excerpt_ids = self.rhs_multibuffer.read(cx).excerpt_ids();
- let lhs_excerpt_ids = lhs.multibuffer.read(cx).excerpt_ids();
- assert_eq!(
- rhs_excerpt_ids.len(),
- lhs_excerpt_ids.len(),
- "excerpt count mismatch: rhs has {}, lhs has {}",
- rhs_excerpt_ids.len(),
- lhs_excerpt_ids.len(),
- );
-
- let rhs_display_map = self.rhs_editor.read(cx).display_map.clone();
- let companion = rhs_display_map
- .read(cx)
- .companion()
- .cloned()
- .expect("should have companion");
- let (lhs_to_rhs, rhs_to_lhs) = {
- let c = companion.read(cx);
- let (l, r) = c.excerpt_mappings();
- (l.clone(), r.clone())
- };
-
- assert_eq!(
- lhs_to_rhs.len(),
- rhs_to_lhs.len(),
- "mapping size mismatch: lhs_to_rhs has {}, rhs_to_lhs has {}",
- lhs_to_rhs.len(),
- rhs_to_lhs.len(),
- );
+ let rhs_snapshot = self.rhs_multibuffer.read(cx).snapshot(cx);
+ let rhs_excerpts = rhs_snapshot.excerpts().collect::<Vec<_>>();
+ let lhs_snapshot = lhs.multibuffer.read(cx).snapshot(cx);
+ let lhs_excerpts = lhs_snapshot.excerpts().collect::<Vec<_>>();
+ assert_eq!(lhs_excerpts.len(), rhs_excerpts.len());
- for (&lhs_id, &rhs_id) in &lhs_to_rhs {
- let reverse = rhs_to_lhs.get(&rhs_id);
- assert_eq!(
- reverse,
- Some(&lhs_id),
- "lhs_to_rhs maps {lhs_id:?} -> {rhs_id:?}, but rhs_to_lhs maps {rhs_id:?} -> {reverse:?}",
- );
- }
- for (&rhs_id, &lhs_id) in &rhs_to_lhs {
- let reverse = lhs_to_rhs.get(&lhs_id);
+ for (lhs_excerpt, rhs_excerpt) in lhs_excerpts.into_iter().zip(rhs_excerpts) {
assert_eq!(
- reverse,
- Some(&rhs_id),
- "rhs_to_lhs maps {rhs_id:?} -> {lhs_id:?}, but lhs_to_rhs maps {lhs_id:?} -> {reverse:?}",
+ lhs_snapshot
+ .path_for_buffer(lhs_excerpt.context.start.buffer_id)
+ .unwrap(),
+ rhs_snapshot
+ .path_for_buffer(rhs_excerpt.context.start.buffer_id)
+ .unwrap(),
+ "corresponding excerpts should have the same path"
);
- }
-
- assert_eq!(
- lhs_to_rhs.len(),
- rhs_excerpt_ids.len(),
- "mapping covers {} excerpts but rhs has {}",
- lhs_to_rhs.len(),
- rhs_excerpt_ids.len(),
- );
-
- let rhs_mapped_order: Vec<ExcerptId> = rhs_excerpt_ids
- .iter()
- .map(|rhs_id| {
- *rhs_to_lhs.get(rhs_id).unwrap_or_else(|| {
- panic!("rhs excerpt {rhs_id:?} has no mapping in rhs_to_lhs")
- })
- })
- .collect();
- assert_eq!(
- rhs_mapped_order, lhs_excerpt_ids,
- "excerpt ordering mismatch: mapping rhs order through rhs_to_lhs doesn't match lhs order",
- );
-
- let rhs_paths: Vec<PathKey> = self.rhs_multibuffer.read(cx).paths().cloned().collect();
- let lhs_paths: Vec<PathKey> = lhs.multibuffer.read(cx).paths().cloned().collect();
- assert_eq!(
- rhs_paths, lhs_paths,
- "path set mismatch between rhs and lhs"
- );
-
- for path in &rhs_paths {
- let rhs_path_excerpts: Vec<ExcerptId> = self
+ let diff = self
.rhs_multibuffer
.read(cx)
- .excerpts_for_path(path)
- .collect();
- let lhs_path_excerpts: Vec<ExcerptId> =
- lhs.multibuffer.read(cx).excerpts_for_path(path).collect();
+ .diff_for(rhs_excerpt.context.start.buffer_id)
+ .expect("missing diff");
assert_eq!(
- rhs_path_excerpts.len(),
- lhs_path_excerpts.len(),
- "excerpt count mismatch for path {path:?}: rhs has {}, lhs has {}",
- rhs_path_excerpts.len(),
- lhs_path_excerpts.len(),
- );
- let rhs_path_mapped: Vec<ExcerptId> = rhs_path_excerpts
- .iter()
- .map(|rhs_id| *rhs_to_lhs.get(rhs_id).unwrap())
- .collect();
- assert_eq!(
- rhs_path_mapped, lhs_path_excerpts,
- "per-path excerpt ordering mismatch for {path:?}",
+ lhs_excerpt.context.start.buffer_id,
+ diff.read(cx).base_text(cx).remote_id(),
+ "corresponding lhs excerpt should show diff base text"
);
+
+ if quiesced {
+ let diff_snapshot = diff.read(cx).snapshot(cx);
+ let lhs_buffer_snapshot = lhs_snapshot
+ .buffer_for_id(lhs_excerpt.context.start.buffer_id)
+ .unwrap();
+ let rhs_buffer_snapshot = rhs_snapshot
+ .buffer_for_id(rhs_excerpt.context.start.buffer_id)
+ .unwrap();
+ let lhs_range = lhs_excerpt.context.to_point(&lhs_buffer_snapshot);
+ let rhs_range = rhs_excerpt.context.to_point(&rhs_buffer_snapshot);
+ let expected_lhs_range =
+ rhs_range_to_base_text_range(&rhs_range, &diff_snapshot, &rhs_buffer_snapshot);
+ assert_eq!(
+ lhs_range, expected_lhs_range,
+ "corresponding lhs excerpt should have a matching range"
+ )
+ }
}
}
}
@@ -2316,7 +2245,7 @@ mod tests {
let context_lines = rng.random_range(0..2);
editor.update(cx, |editor, cx| {
let path = PathKey::for_buffer(&buffer, cx);
- editor.set_excerpts_for_path(path, buffer, ranges, context_lines, diff, cx);
+ editor.update_excerpts_for_path(path, buffer, ranges, context_lines, diff, cx);
});
editor.update(cx, |editor, cx| {
editor.check_invariants(true, cx);
@@ -2351,7 +2280,14 @@ mod tests {
let context_lines = rng.random_range(0..2);
editor.update(cx, |editor, cx| {
let path = PathKey::for_buffer(&buffer, cx);
- editor.set_excerpts_for_path(path, buffer, ranges, context_lines, diff, cx);
+ editor.update_excerpts_for_path(
+ path,
+ buffer,
+ ranges,
+ context_lines,
+ diff,
+ cx,
+ );
});
}
15..=29 => {
@@ -7,7 +7,7 @@ use gpui::{
ParentElement, Pixels, StatefulInteractiveElement, Styled, TextStyleRefinement, Window, div,
linear_color_stop, linear_gradient, point, px, size,
};
-use multi_buffer::{Anchor, ExcerptId};
+use multi_buffer::{Anchor, ExcerptBoundaryInfo};
use settings::Settings;
use smallvec::smallvec;
use text::BufferId;
@@ -429,7 +429,7 @@ impl SplitBufferHeadersElement {
let sticky_header_excerpt_id = snapshot
.sticky_header_excerpt(scroll_position.y)
- .map(|e| e.excerpt.id);
+ .map(|e| e.excerpt);
let non_sticky_headers = self.build_non_sticky_headers(
&snapshot,
@@ -476,9 +476,10 @@ impl SplitBufferHeadersElement {
let mut anchors_by_buffer: HashMap<BufferId, (usize, Anchor)> = HashMap::default();
for selection in all_anchor_selections.iter() {
let head = selection.head();
- if let Some(buffer_id) = head.text_anchor.buffer_id {
+ if let Some((text_anchor, _)) = snapshot.buffer_snapshot().anchor_to_buffer_anchor(head)
+ {
anchors_by_buffer
- .entry(buffer_id)
+ .entry(text_anchor.buffer_id)
.and_modify(|(latest_id, latest_anchor)| {
if selection.id > *latest_id {
*latest_id = selection.id;
@@ -520,7 +521,7 @@ impl SplitBufferHeadersElement {
);
let editor_bg_color = cx.theme().colors().editor_background;
- let selected = selected_buffer_ids.contains(&excerpt.buffer_id);
+ let selected = selected_buffer_ids.contains(&excerpt.buffer_id());
let mut header = v_flex()
.id("sticky-buffer-header")
@@ -594,7 +595,7 @@ impl SplitBufferHeadersElement {
end_row: DisplayRow,
selected_buffer_ids: &HashSet<BufferId>,
latest_selection_anchors: &HashMap<BufferId, Anchor>,
- sticky_header_excerpt_id: Option<ExcerptId>,
+ sticky_header: Option<&ExcerptBoundaryInfo>,
window: &mut Window,
cx: &mut App,
) -> Vec<BufferHeaderLayout> {
@@ -603,7 +604,7 @@ impl SplitBufferHeadersElement {
for (block_row, block) in snapshot.blocks_in_range(start_row..end_row) {
let (excerpt, is_folded) = match block {
Block::BufferHeader { excerpt, .. } => {
- if sticky_header_excerpt_id == Some(excerpt.id) {
+ if sticky_header == Some(excerpt) {
continue;
}
(excerpt, false)
@@ -613,7 +614,7 @@ impl SplitBufferHeadersElement {
Block::ExcerptBoundary { .. } | Block::Custom(_) | Block::Spacer { .. } => continue,
};
- let selected = selected_buffer_ids.contains(&excerpt.buffer_id);
+ let selected = selected_buffer_ids.contains(&excerpt.buffer_id());
let jump_data = header_jump_data(
snapshot,
block_row,
@@ -0,0 +1,101 @@
+use crate::Editor;
+
+use collections::HashMap;
+use gpui::{App, Task, Window};
+use lsp::LanguageServerName;
+use project::{Location, project_settings::ProjectSettings};
+use settings::Settings as _;
+use task::{TaskContext, TaskVariables, VariableName};
+use text::{BufferId, ToOffset, ToPoint};
+
+impl Editor {
+ pub fn task_context(&self, window: &mut Window, cx: &mut App) -> Task<Option<TaskContext>> {
+ let Some(project) = self.project.clone() else {
+ return Task::ready(None);
+ };
+ let display_snapshot = self.display_snapshot(cx);
+ let selection = self.selections.newest_adjusted(&display_snapshot);
+ let start = display_snapshot
+ .buffer_snapshot()
+ .anchor_after(selection.start);
+ let end = display_snapshot
+ .buffer_snapshot()
+ .anchor_after(selection.end);
+ let Some((buffer_snapshot, range)) = display_snapshot
+ .buffer_snapshot()
+ .anchor_range_to_buffer_anchor_range(start..end)
+ else {
+ return Task::ready(None);
+ };
+ let Some(buffer) = self.buffer.read(cx).buffer(buffer_snapshot.remote_id()) else {
+ return Task::ready(None);
+ };
+ let location = Location { buffer, range };
+ let captured_variables = {
+ let mut variables = TaskVariables::default();
+ let buffer = location.buffer.read(cx);
+ let buffer_id = buffer.remote_id();
+ let snapshot = buffer.snapshot();
+ let starting_point = location.range.start.to_point(&snapshot);
+ let starting_offset = starting_point.to_offset(&snapshot);
+ for (_, tasks) in self
+ .tasks
+ .range((buffer_id, 0)..(buffer_id, starting_point.row + 1))
+ {
+ if !tasks
+ .context_range
+ .contains(&crate::BufferOffset(starting_offset))
+ {
+ continue;
+ }
+ for (capture_name, value) in tasks.extra_variables.iter() {
+ variables.insert(
+ VariableName::Custom(capture_name.to_owned().into()),
+ value.clone(),
+ );
+ }
+ }
+ variables
+ };
+
+ project.update(cx, |project, cx| {
+ project.task_store().update(cx, |task_store, cx| {
+ task_store.task_context_for_location(captured_variables, location, cx)
+ })
+ })
+ }
+
+ pub fn lsp_task_sources(&self, cx: &App) -> HashMap<LanguageServerName, Vec<BufferId>> {
+ let lsp_settings = &ProjectSettings::get_global(cx).lsp;
+
+ self.buffer()
+ .read(cx)
+ .all_buffers()
+ .into_iter()
+ .filter_map(|buffer| {
+ let lsp_tasks_source = buffer
+ .read(cx)
+ .language()?
+ .context_provider()?
+ .lsp_task_source()?;
+ if lsp_settings
+ .get(&lsp_tasks_source)
+ .is_none_or(|s| s.enable_lsp_tasks)
+ {
+ let buffer_id = buffer.read(cx).remote_id();
+ Some((lsp_tasks_source, buffer_id))
+ } else {
+ None
+ }
+ })
+ .fold(
+ HashMap::default(),
+ |mut acc, (lsp_task_source, buffer_id)| {
+ acc.entry(lsp_task_source)
+ .or_insert_with(Vec::new)
+ .push(buffer_id);
+ acc
+ },
+ )
+ }
+}
@@ -245,7 +245,7 @@ pub fn editor_content_with_blocks_and_size(
format!(
"§ {}",
first_excerpt
- .buffer
+ .buffer(snapshot.buffer_snapshot())
.file()
.map(|file| file.file_name(cx))
.unwrap_or("<no file>")
@@ -274,7 +274,7 @@ pub fn editor_content_with_blocks_and_size(
format!(
"§ {}",
excerpt
- .buffer
+ .buffer(snapshot.buffer_snapshot())
.file()
.map(|file| file.file_name(cx))
.unwrap_or("<no file>")
@@ -1,5 +1,5 @@
use crate::{
- AnchorRangeExt, DisplayPoint, Editor, ExcerptId, MultiBuffer, MultiBufferSnapshot, RowExt,
+ DisplayPoint, Editor, MultiBuffer, MultiBufferSnapshot, RowExt,
display_map::{HighlightKey, ToDisplayPoint},
};
use buffer_diff::DiffHunkStatusKind;
@@ -13,7 +13,9 @@ use gpui::{
};
use itertools::Itertools;
use language::{Buffer, BufferSnapshot, LanguageRegistry};
-use multi_buffer::{Anchor, ExcerptRange, MultiBufferOffset, MultiBufferRow, PathKey};
+use multi_buffer::{
+ Anchor, AnchorRangeExt, ExcerptRange, MultiBufferOffset, MultiBufferRow, PathKey,
+};
use parking_lot::RwLock;
use project::{FakeFs, Project};
use std::{
@@ -464,7 +466,21 @@ impl EditorTestContext {
let selections = editor.selections.disjoint_anchors_arc();
let excerpts = multibuffer_snapshot
.excerpts()
- .map(|(e_id, snapshot, range)| (e_id, snapshot.clone(), range))
+ .map(|info| {
+ (
+ multibuffer_snapshot
+ .buffer_for_id(info.context.start.buffer_id)
+ .cloned()
+ .unwrap(),
+ multibuffer_snapshot
+ .anchor_in_excerpt(info.context.start)
+ .unwrap()
+ ..multibuffer_snapshot
+ .anchor_in_excerpt(info.context.end)
+ .unwrap(),
+ info,
+ )
+ })
.collect::<Vec<_>>();
(multibuffer_snapshot, selections, excerpts)
@@ -478,14 +494,23 @@ impl EditorTestContext {
fmt_additional_notes(),
);
- for (ix, (excerpt_id, snapshot, range)) in excerpts.into_iter().enumerate() {
+ for (ix, (snapshot, multibuffer_range, excerpt_range)) in excerpts.into_iter().enumerate() {
let is_folded = self
.update_editor(|editor, _, cx| editor.is_buffer_folded(snapshot.remote_id(), cx));
let (expected_text, expected_selections) =
marked_text_ranges(expected_excerpts[ix], true);
if expected_text == "[FOLDED]\n" {
assert!(is_folded, "excerpt {} should be folded", ix);
- let is_selected = selections.iter().any(|s| s.head().excerpt_id == excerpt_id);
+ let is_selected = selections.iter().any(|s| {
+ multibuffer_range
+ .start
+ .cmp(&s.head(), &multibuffer_snapshot)
+ .is_le()
+ && multibuffer_range
+ .end
+ .cmp(&s.head(), &multibuffer_snapshot)
+ .is_ge()
+ });
if !expected_selections.is_empty() {
assert!(
is_selected,
@@ -510,7 +535,7 @@ impl EditorTestContext {
);
assert_eq!(
multibuffer_snapshot
- .text_for_range(Anchor::range_in_buffer(excerpt_id, range.context.clone()))
+ .text_for_range(multibuffer_range.clone())
.collect::<String>(),
expected_text,
"{}",
@@ -519,13 +544,24 @@ impl EditorTestContext {
let selections = selections
.iter()
- .filter(|s| s.head().excerpt_id == excerpt_id)
- .map(|s| {
- let head = text::ToOffset::to_offset(&s.head().text_anchor, &snapshot)
- - text::ToOffset::to_offset(&range.context.start, &snapshot);
- let tail = text::ToOffset::to_offset(&s.head().text_anchor, &snapshot)
- - text::ToOffset::to_offset(&range.context.start, &snapshot);
- tail..head
+ .filter(|s| {
+ multibuffer_range
+ .start
+ .cmp(&s.head(), &multibuffer_snapshot)
+ .is_le()
+ && multibuffer_range
+ .end
+ .cmp(&s.head(), &multibuffer_snapshot)
+ .is_ge()
+ })
+ .filter_map(|s| {
+ let (head_anchor, buffer_snapshot) =
+ multibuffer_snapshot.anchor_to_buffer_anchor(s.head())?;
+ let head = text::ToOffset::to_offset(&head_anchor, buffer_snapshot)
+ - text::ToOffset::to_offset(&excerpt_range.context.start, buffer_snapshot);
+ let tail = text::ToOffset::to_offset(&head_anchor, buffer_snapshot)
+ - text::ToOffset::to_offset(&excerpt_range.context.start, buffer_snapshot);
+ Some(tail..head)
})
.collect::<Vec<_>>();
// todo: selections that cross excerpt boundaries..
@@ -546,9 +582,12 @@ impl EditorTestContext {
let selections = editor.selections.disjoint_anchors_arc().to_vec();
let excerpts = multibuffer_snapshot
.excerpts()
- .map(|(e_id, snapshot, range)| {
- let is_folded = editor.is_buffer_folded(snapshot.remote_id(), cx);
- (e_id, snapshot.clone(), range, is_folded)
+ .map(|info| {
+ let buffer_snapshot = multibuffer_snapshot
+ .buffer_for_id(info.context.start.buffer_id)
+ .unwrap();
+ let is_folded = editor.is_buffer_folded(buffer_snapshot.remote_id(), cx);
+ (buffer_snapshot.clone(), info, is_folded)
})
.collect::<Vec<_>>();
@@ -673,7 +712,7 @@ impl EditorTestContext {
struct FormatMultiBufferAsMarkedText {
multibuffer_snapshot: MultiBufferSnapshot,
selections: Vec<Selection<Anchor>>,
- excerpts: Vec<(ExcerptId, BufferSnapshot, ExcerptRange<text::Anchor>, bool)>,
+ excerpts: Vec<(BufferSnapshot, ExcerptRange<text::Anchor>, bool)>,
}
impl std::fmt::Display for FormatMultiBufferAsMarkedText {
@@ -684,25 +723,40 @@ impl std::fmt::Display for FormatMultiBufferAsMarkedText {
excerpts,
} = self;
- for (excerpt_id, snapshot, range, is_folded) in excerpts.into_iter() {
+ for (_snapshot, range, is_folded) in excerpts.into_iter() {
write!(f, "[EXCERPT]\n")?;
if *is_folded {
write!(f, "[FOLDED]\n")?;
}
+ let multibuffer_range = multibuffer_snapshot
+ .buffer_anchor_range_to_anchor_range(range.context.clone())
+ .unwrap();
+
let mut text = multibuffer_snapshot
- .text_for_range(Anchor::range_in_buffer(*excerpt_id, range.context.clone()))
+ .text_for_range(multibuffer_range.clone())
.collect::<String>();
let selections = selections
.iter()
- .filter(|&s| s.head().excerpt_id == *excerpt_id)
- .map(|s| {
- let head = text::ToOffset::to_offset(&s.head().text_anchor, &snapshot)
- - text::ToOffset::to_offset(&range.context.start, &snapshot);
- let tail = text::ToOffset::to_offset(&s.head().text_anchor, &snapshot)
- - text::ToOffset::to_offset(&range.context.start, &snapshot);
- tail..head
+ .filter(|&s| {
+ multibuffer_range
+ .start
+ .cmp(&s.head(), multibuffer_snapshot)
+ .is_le()
+ && multibuffer_range
+ .end
+ .cmp(&s.head(), multibuffer_snapshot)
+ .is_ge()
+ })
+ .filter_map(|s| {
+ let (head_anchor, buffer_snapshot) =
+ multibuffer_snapshot.anchor_to_buffer_anchor(s.head())?;
+ let head = text::ToOffset::to_offset(&head_anchor, buffer_snapshot)
+ - text::ToOffset::to_offset(&range.context.start, buffer_snapshot);
+ let tail = text::ToOffset::to_offset(&head_anchor, buffer_snapshot)
+ - text::ToOffset::to_offset(&range.context.start, buffer_snapshot);
+ Some(tail..head)
})
.rev()
.collect::<Vec<_>>();
@@ -47,7 +47,7 @@ impl ActiveBufferEncoding {
self.is_shared = project.is_shared();
self.is_via_remote_server = project.is_via_remote_server();
- if let Some((_, buffer, _)) = editor.read(cx).active_excerpt(cx) {
+ if let Some(buffer) = editor.read(cx).active_buffer(cx) {
let buffer = buffer.read(cx);
self.active_encoding = Some(buffer.encoding());
self.has_bom = buffer.has_bom();
@@ -47,11 +47,11 @@ impl EncodingSelector {
window: &mut Window,
cx: &mut Context<Workspace>,
) -> Option<()> {
- let (_, buffer, _) = workspace
+ let buffer = workspace
.active_item(cx)?
.act_as::<Editor>(cx)?
.read(cx)
- .active_excerpt(cx)?;
+ .active_buffer(cx)?;
let buffer_handle = buffer.read(cx);
let project = workspace.project().read(cx);
@@ -0,0 +1,15 @@
+[package]
+name = "env_var"
+version = "0.1.0"
+edition.workspace = true
+publish.workspace = true
+license = "GPL-3.0-or-later"
+
+[lints]
+workspace = true
+
+[lib]
+path = "src/env_var.rs"
+
+[dependencies]
+gpui.workspace = true
@@ -0,0 +1 @@
+../../LICENSE-GPL
@@ -0,0 +1,40 @@
+use gpui::SharedString;
+
+#[derive(Clone)]
+pub struct EnvVar {
+ pub name: SharedString,
+ /// Value of the environment variable. Also `None` when set to an empty string.
+ pub value: Option<String>,
+}
+
+impl EnvVar {
+ pub fn new(name: SharedString) -> Self {
+ let value = std::env::var(name.as_str()).ok();
+ if value.as_ref().is_some_and(|v| v.is_empty()) {
+ Self { name, value: None }
+ } else {
+ Self { name, value }
+ }
+ }
+
+ pub fn or(self, other: EnvVar) -> EnvVar {
+ if self.value.is_some() { self } else { other }
+ }
+}
+
+/// Creates a `LazyLock<EnvVar>` expression for use in a `static` declaration.
+#[macro_export]
+macro_rules! env_var {
+ ($name:expr) => {
+ ::std::sync::LazyLock::new(|| $crate::EnvVar::new(($name).into()))
+ };
+}
+
+/// Generates a `LazyLock<bool>` expression for use in a `static` declaration. Checks if the
+/// environment variable exists and is non-empty.
+#[macro_export]
+macro_rules! bool_env_var {
+ ($name:expr) => {
+ ::std::sync::LazyLock::new(|| $crate::EnvVar::new(($name).into()).value.is_some())
+ };
+}
@@ -1,7 +1,7 @@
use std::path::PathBuf;
use std::sync::Arc;
-use client::{Client, ProxySettings, UserStore};
+use client::{Client, ProxySettings, RefreshLlmTokenListener, UserStore};
use db::AppDatabase;
use extension::ExtensionHostProxy;
use fs::RealFs;
@@ -108,7 +108,8 @@ pub fn init(cx: &mut App) -> Arc<AgentCliAppState> {
let extension_host_proxy = ExtensionHostProxy::global(cx);
debug_adapter_extension::init(extension_host_proxy.clone(), cx);
language_extension::init(LspAccess::Noop, extension_host_proxy, languages.clone());
- language_model::init(user_store.clone(), client.clone(), cx);
+ language_model::init(cx);
+ RefreshLlmTokenListener::register(client.clone(), user_store.clone(), cx);
language_models::init(user_store.clone(), client.clone(), cx);
languages::init(languages.clone(), fs.clone(), node_runtime.clone(), cx);
prompt_store::init(cx);
@@ -296,16 +296,12 @@ impl ExtensionBuilder {
let remotes_output = util::command::new_command("git")
.arg("--git-dir")
.arg(&git_dir)
- .args(["remote", "-v"])
+ .args(["remote", "get-url", "origin"])
+ .env("GIT_CONFIG_GLOBAL", "/dev/null")
.output()
.await?;
let has_remote = remotes_output.status.success()
- && String::from_utf8_lossy(&remotes_output.stdout)
- .lines()
- .any(|line| {
- let mut parts = line.split(|c: char| c.is_whitespace());
- parts.next() == Some("origin") && parts.any(|part| part == url)
- });
+ && String::from_utf8_lossy(&remotes_output.stdout).trim() == url;
if !has_remote {
bail!(
"grammar directory '{}' already exists, but is not a git clone of '{}'",
@@ -47,12 +47,6 @@ impl FeatureFlag for DiffReviewFeatureFlag {
}
}
-pub struct GitGraphFeatureFlag;
-
-impl FeatureFlag for GitGraphFeatureFlag {
- const NAME: &'static str = "git-graph";
-}
-
pub struct StreamingEditFileToolFeatureFlag;
impl FeatureFlag for StreamingEditFileToolFeatureFlag {
@@ -10,6 +10,7 @@ use git::{
GRAPH_CHUNK_SIZE, GitRepository, GitRepositoryCheckpoint, InitialGraphCommitData, LogOrder,
LogSource, PushOptions, Remote, RepoPath, ResetMode, SearchCommitArgs, Worktree,
},
+ stash::GitStash,
status::{
DiffTreeType, FileStatus, GitStatus, StatusCode, TrackedStatus, TreeDiff, TreeDiffStatus,
UnmergedStatus,
@@ -35,8 +36,16 @@ pub struct FakeGitRepository {
pub(crate) is_trusted: Arc<AtomicBool>,
}
+#[derive(Debug, Clone)]
+pub struct FakeCommitSnapshot {
+ pub head_contents: HashMap<RepoPath, String>,
+ pub index_contents: HashMap<RepoPath, String>,
+ pub sha: String,
+}
+
#[derive(Debug, Clone)]
pub struct FakeGitRepositoryState {
+ pub commit_history: Vec<FakeCommitSnapshot>,
pub event_emitter: smol::channel::Sender<PathBuf>,
pub unmerged_paths: HashMap<RepoPath, UnmergedStatus>,
pub head_contents: HashMap<RepoPath, String>,
@@ -53,7 +62,7 @@ pub struct FakeGitRepositoryState {
pub simulated_create_worktree_error: Option<String>,
pub refs: HashMap<String, String>,
pub graph_commits: Vec<Arc<InitialGraphCommitData>>,
- pub worktrees: Vec<Worktree>,
+ pub stash_entries: GitStash,
}
impl FakeGitRepositoryState {
@@ -73,7 +82,8 @@ impl FakeGitRepositoryState {
oids: Default::default(),
remotes: HashMap::default(),
graph_commits: Vec::new(),
- worktrees: Vec::new(),
+ commit_history: Vec::new(),
+ stash_entries: Default::default(),
}
}
}
@@ -216,11 +226,52 @@ impl GitRepository for FakeGitRepository {
fn reset(
&self,
- _commit: String,
- _mode: ResetMode,
+ commit: String,
+ mode: ResetMode,
_env: Arc<HashMap<String, String>>,
) -> BoxFuture<'_, Result<()>> {
- unimplemented!()
+ self.with_state_async(true, move |state| {
+ let pop_count = if commit == "HEAD~" || commit == "HEAD^" {
+ 1
+ } else if let Some(suffix) = commit.strip_prefix("HEAD~") {
+ suffix
+ .parse::<usize>()
+ .with_context(|| format!("Invalid HEAD~ offset: {commit}"))?
+ } else {
+ match state
+ .commit_history
+ .iter()
+ .rposition(|entry| entry.sha == commit)
+ {
+ Some(index) => state.commit_history.len() - index,
+ None => anyhow::bail!("Unknown commit ref: {commit}"),
+ }
+ };
+
+ if pop_count == 0 || pop_count > state.commit_history.len() {
+ anyhow::bail!(
+ "Cannot reset {pop_count} commit(s): only {} in history",
+ state.commit_history.len()
+ );
+ }
+
+ let target_index = state.commit_history.len() - pop_count;
+ let snapshot = state.commit_history[target_index].clone();
+ state.commit_history.truncate(target_index);
+
+ match mode {
+ ResetMode::Soft => {
+ state.head_contents = snapshot.head_contents;
+ }
+ ResetMode::Mixed => {
+ state.head_contents = snapshot.head_contents;
+ state.index_contents = state.head_contents.clone();
+ }
+ }
+
+ state.refs.insert("HEAD".into(), snapshot.sha);
+ Ok(())
+ })
}
fn checkout_files(
@@ -380,13 +431,13 @@ impl GitRepository for FakeGitRepository {
}
fn stash_entries(&self) -> BoxFuture<'_, Result<git::stash::GitStash>> {
- async { Ok(git::stash::GitStash::default()) }.boxed()
+ self.with_state_async(false, |state| Ok(state.stash_entries.clone()))
}
fn branches(&self) -> BoxFuture<'_, Result<Vec<Branch>>> {
self.with_state_async(false, move |state| {
let current_branch = &state.current_branch_name;
- Ok(state
+ let mut branches = state
.branches
.iter()
.map(|branch_name| {
@@ -404,78 +455,163 @@ impl GitRepository for FakeGitRepository {
upstream: None,
}
})
- .collect())
+ .collect::<Vec<_>>();
+ // compute snapshot expects these to be sorted by ref_name
+ // because that's what git itself does
+ branches.sort_by(|a, b| a.ref_name.cmp(&b.ref_name));
+ Ok(branches)
})
}
fn worktrees(&self) -> BoxFuture<'_, Result<Vec<Worktree>>> {
- let dot_git_path = self.dot_git_path.clone();
- self.with_state_async(false, move |state| {
- let work_dir = dot_git_path
- .parent()
- .map(PathBuf::from)
- .unwrap_or(dot_git_path);
- let head_sha = state
- .refs
- .get("HEAD")
- .cloned()
- .unwrap_or_else(|| "0000000".to_string());
- let branch_ref = state
- .current_branch_name
- .as_ref()
- .map(|name| format!("refs/heads/{name}"))
- .unwrap_or_else(|| "refs/heads/main".to_string());
- let main_worktree = Worktree {
- path: work_dir,
- ref_name: Some(branch_ref.into()),
- sha: head_sha.into(),
- is_main: true,
- };
+ let fs = self.fs.clone();
+ let common_dir_path = self.common_dir_path.clone();
+ let executor = self.executor.clone();
+
+ async move {
+ executor.simulate_random_delay().await;
+
+ let (main_worktree, refs) = fs.with_git_state(&common_dir_path, false, |state| {
+ let work_dir = common_dir_path
+ .parent()
+ .map(PathBuf::from)
+ .unwrap_or_else(|| common_dir_path.clone());
+ let head_sha = state
+ .refs
+ .get("HEAD")
+ .cloned()
+ .unwrap_or_else(|| "0000000".to_string());
+ let branch_ref = state
+ .current_branch_name
+ .as_ref()
+ .map(|name| format!("refs/heads/{name}"))
+ .unwrap_or_else(|| "refs/heads/main".to_string());
+ let main_wt = Worktree {
+ path: work_dir,
+ ref_name: Some(branch_ref.into()),
+ sha: head_sha.into(),
+ is_main: true,
+ };
+ (main_wt, state.refs.clone())
+ })?;
+
let mut all = vec![main_worktree];
- all.extend(state.worktrees.iter().cloned());
+
+ let worktrees_dir = common_dir_path.join("worktrees");
+ if let Ok(mut entries) = fs.read_dir(&worktrees_dir).await {
+ use futures::StreamExt;
+ while let Some(Ok(entry_path)) = entries.next().await {
+ let head_content = match fs.load(&entry_path.join("HEAD")).await {
+ Ok(content) => content,
+ Err(_) => continue,
+ };
+ let gitdir_content = match fs.load(&entry_path.join("gitdir")).await {
+ Ok(content) => content,
+ Err(_) => continue,
+ };
+
+ let ref_name = head_content
+ .strip_prefix("ref: ")
+ .map(|s| s.trim().to_string());
+ let sha = ref_name
+ .as_ref()
+ .and_then(|r| refs.get(r))
+ .cloned()
+ .unwrap_or_else(|| head_content.trim().to_string());
+
+ let worktree_path = PathBuf::from(gitdir_content.trim())
+ .parent()
+ .map(PathBuf::from)
+ .unwrap_or_default();
+
+ all.push(Worktree {
+ path: worktree_path,
+ ref_name: ref_name.map(Into::into),
+ sha: sha.into(),
+ is_main: false,
+ });
+ }
+ }
+
Ok(all)
- })
+ }
+ .boxed()
}
fn create_worktree(
&self,
- branch_name: String,
+ branch_name: Option<String>,
path: PathBuf,
from_commit: Option<String>,
) -> BoxFuture<'_, Result<()>> {
let fs = self.fs.clone();
let executor = self.executor.clone();
let dot_git_path = self.dot_git_path.clone();
+ let common_dir_path = self.common_dir_path.clone();
async move {
executor.simulate_random_delay().await;
- // Check for simulated error before any side effects
+ // Check for simulated error and duplicate branch before any side effects.
fs.with_git_state(&dot_git_path, false, |state| {
if let Some(message) = &state.simulated_create_worktree_error {
anyhow::bail!("{message}");
}
+ if let Some(ref name) = branch_name {
+ if state.branches.contains(name) {
+ bail!("a branch named '{}' already exists", name);
+ }
+ }
Ok(())
})??;
- // Create directory before updating state so state is never
- // inconsistent with the filesystem
+
+ // Create the worktree checkout directory.
fs.create_dir(&path).await?;
- fs.with_git_state(&dot_git_path, true, {
- let path = path.clone();
- move |state| {
- if state.branches.contains(&branch_name) {
- bail!("a branch named '{}' already exists", branch_name);
- }
+
+ // Create .git/worktrees/<name>/ directory with HEAD, commondir, gitdir.
+ let worktree_entry_name = branch_name
+ .as_deref()
+ .unwrap_or_else(|| path.file_name().unwrap().to_str().unwrap());
+ let worktrees_entry_dir = common_dir_path.join("worktrees").join(worktree_entry_name);
+ fs.create_dir(&worktrees_entry_dir).await?;
+
+ let sha = from_commit.unwrap_or_else(|| "fake-sha".to_string());
+ let head_content = if let Some(ref branch_name) = branch_name {
+ let ref_name = format!("refs/heads/{branch_name}");
+ format!("ref: {ref_name}")
+ } else {
+ sha.clone()
+ };
+ fs.write_file_internal(
+ worktrees_entry_dir.join("HEAD"),
+ head_content.into_bytes(),
+ false,
+ )?;
+ fs.write_file_internal(
+ worktrees_entry_dir.join("commondir"),
+ common_dir_path.to_string_lossy().into_owned().into_bytes(),
+ false,
+ )?;
+ let worktree_dot_git = path.join(".git");
+ fs.write_file_internal(
+ worktrees_entry_dir.join("gitdir"),
+ worktree_dot_git.to_string_lossy().into_owned().into_bytes(),
+ false,
+ )?;
+
+ // Create .git file in the worktree checkout.
+ fs.write_file_internal(
+ &worktree_dot_git,
+ format!("gitdir: {}", worktrees_entry_dir.display()).into_bytes(),
+ false,
+ )?;
+
+ // Update git state: add ref and branch.
+ fs.with_git_state(&dot_git_path, true, move |state| {
+ if let Some(branch_name) = branch_name {
let ref_name = format!("refs/heads/{branch_name}");
- let sha = from_commit.unwrap_or_else(|| "fake-sha".to_string());
- state.refs.insert(ref_name.clone(), sha.clone());
- state.worktrees.push(Worktree {
- path,
- ref_name: Some(ref_name.into()),
- sha: sha.into(),
- is_main: false,
- });
+ state.refs.insert(ref_name, sha);
state.branches.insert(branch_name);
- Ok::<(), anyhow::Error>(())
}
+ Ok::<(), anyhow::Error>(())
})??;
Ok(())
}
@@ -485,20 +621,23 @@ impl GitRepository for FakeGitRepository {
fn remove_worktree(&self, path: PathBuf, _force: bool) -> BoxFuture<'_, Result<()>> {
let fs = self.fs.clone();
let executor = self.executor.clone();
- let dot_git_path = self.dot_git_path.clone();
+ let common_dir_path = self.common_dir_path.clone();
async move {
executor.simulate_random_delay().await;
- // Validate the worktree exists in state before touching the filesystem
- fs.with_git_state(&dot_git_path, false, {
- let path = path.clone();
- move |state| {
- if !state.worktrees.iter().any(|w| w.path == path) {
- bail!("no worktree found at path: {}", path.display());
- }
- Ok(())
- }
- })??;
- // Now remove the directory
+
+ // Read the worktree's .git file to find its entry directory.
+ let dot_git_file = path.join(".git");
+ let content = fs
+ .load(&dot_git_file)
+ .await
+ .with_context(|| format!("no worktree found at path: {}", path.display()))?;
+ let gitdir = content
+ .strip_prefix("gitdir:")
+ .context("invalid .git file in worktree")?
+ .trim();
+ let worktree_entry_dir = PathBuf::from(gitdir);
+
+ // Remove the worktree checkout directory.
fs.remove_dir(
&path,
RemoveOptions {
@@ -507,11 +646,21 @@ impl GitRepository for FakeGitRepository {
},
)
.await?;
- // Update state
- fs.with_git_state(&dot_git_path, true, move |state| {
- state.worktrees.retain(|worktree| worktree.path != path);
- Ok::<(), anyhow::Error>(())
- })??;
+
+ // Remove the .git/worktrees/<name>/ directory.
+ fs.remove_dir(
+ &worktree_entry_dir,
+ RemoveOptions {
+ recursive: true,
+ ignore_if_not_exists: false,
+ },
+ )
+ .await?;
+
+ // Emit a git event on the main .git directory so the scanner
+ // notices the change.
+ fs.with_git_state(&common_dir_path, true, |_| {})?;
+
Ok(())
}
.boxed()
@@ -520,20 +669,23 @@ impl GitRepository for FakeGitRepository {
fn rename_worktree(&self, old_path: PathBuf, new_path: PathBuf) -> BoxFuture<'_, Result<()>> {
let fs = self.fs.clone();
let executor = self.executor.clone();
- let dot_git_path = self.dot_git_path.clone();
+ let common_dir_path = self.common_dir_path.clone();
async move {
executor.simulate_random_delay().await;
- // Validate the worktree exists in state before touching the filesystem
- fs.with_git_state(&dot_git_path, false, {
- let old_path = old_path.clone();
- move |state| {
- if !state.worktrees.iter().any(|w| w.path == old_path) {
- bail!("no worktree found at path: {}", old_path.display());
- }
- Ok(())
- }
- })??;
- // Now move the directory
+
+ // Read the worktree's .git file to find its entry directory.
+ let dot_git_file = old_path.join(".git");
+ let content = fs
+ .load(&dot_git_file)
+ .await
+ .with_context(|| format!("no worktree found at path: {}", old_path.display()))?;
+ let gitdir = content
+ .strip_prefix("gitdir:")
+ .context("invalid .git file in worktree")?
+ .trim();
+ let worktree_entry_dir = PathBuf::from(gitdir);
+
+ // Move the worktree checkout directory.
fs.rename(
&old_path,
&new_path,
@@ -544,16 +696,27 @@ impl GitRepository for FakeGitRepository {
},
)
.await?;
- // Update state
- fs.with_git_state(&dot_git_path, true, move |state| {
- let worktree = state
- .worktrees
- .iter_mut()
- .find(|worktree| worktree.path == old_path)
- .expect("worktree was validated above");
- worktree.path = new_path;
- Ok::<(), anyhow::Error>(())
- })??;
+
+ // Update the gitdir file in .git/worktrees/<name>/ to point to the
+ // new location.
+ let new_dot_git = new_path.join(".git");
+ fs.write_file_internal(
+ worktree_entry_dir.join("gitdir"),
+ new_dot_git.to_string_lossy().into_owned().into_bytes(),
+ false,
+ )?;
+
+ // Update the .git file in the moved worktree checkout.
+ fs.write_file_internal(
+ &new_dot_git,
+ format!("gitdir: {}", worktree_entry_dir.display()).into_bytes(),
+ false,
+ )?;
+
+ // Emit a git event on the main .git directory so the scanner
+ // notices the change.
+ fs.with_git_state(&common_dir_path, true, |_| {})?;
+
Ok(())
}
.boxed()
@@ -722,11 +885,30 @@ impl GitRepository for FakeGitRepository {
&self,
_message: gpui::SharedString,
_name_and_email: Option<(gpui::SharedString, gpui::SharedString)>,
- _options: CommitOptions,
+ options: CommitOptions,
_askpass: AskPassDelegate,
_env: Arc<HashMap<String, String>>,
) -> BoxFuture<'_, Result<()>> {
- async { Ok(()) }.boxed()
+ self.with_state_async(true, move |state| {
+ if !options.allow_empty && !options.amend && state.index_contents == state.head_contents
+ {
+ anyhow::bail!("nothing to commit (use allow_empty to create an empty commit)");
+ }
+
+ let old_sha = state.refs.get("HEAD").cloned().unwrap_or_default();
+ state.commit_history.push(FakeCommitSnapshot {
+ head_contents: state.head_contents.clone(),
+ index_contents: state.index_contents.clone(),
+ sha: old_sha,
+ });
+
+ state.head_contents = state.index_contents.clone();
+
+ let new_sha = format!("fake-commit-{}", state.commit_history.len());
+ state.refs.insert("HEAD".into(), new_sha);
+
+ Ok(())
+ })
}
fn run_hook(
@@ -960,10 +1142,88 @@ impl GitRepository for FakeGitRepository {
fn diff_checkpoints(
&self,
- _base_checkpoint: GitRepositoryCheckpoint,
- _target_checkpoint: GitRepositoryCheckpoint,
+ base_checkpoint: GitRepositoryCheckpoint,
+ target_checkpoint: GitRepositoryCheckpoint,
) -> BoxFuture<'_, Result<String>> {
- unimplemented!()
+ let executor = self.executor.clone();
+ let checkpoints = self.checkpoints.clone();
+ async move {
+ executor.simulate_random_delay().await;
+ let checkpoints = checkpoints.lock();
+ let base = checkpoints
+ .get(&base_checkpoint.commit_sha)
+ .context(format!(
+ "invalid base checkpoint: {}",
+ base_checkpoint.commit_sha
+ ))?;
+ let target = checkpoints
+ .get(&target_checkpoint.commit_sha)
+ .context(format!(
+ "invalid target checkpoint: {}",
+ target_checkpoint.commit_sha
+ ))?;
+
+ fn collect_files(
+ entry: &FakeFsEntry,
+ prefix: String,
+ out: &mut std::collections::BTreeMap<String, String>,
+ ) {
+ match entry {
+ FakeFsEntry::File { content, .. } => {
+ out.insert(prefix, String::from_utf8_lossy(content).into_owned());
+ }
+ FakeFsEntry::Dir { entries, .. } => {
+ for (name, child) in entries {
+ let path = if prefix.is_empty() {
+ name.clone()
+ } else {
+ format!("{prefix}/{name}")
+ };
+ collect_files(child, path, out);
+ }
+ }
+ FakeFsEntry::Symlink { .. } => {}
+ }
+ }
+
+ let mut base_files = std::collections::BTreeMap::new();
+ let mut target_files = std::collections::BTreeMap::new();
+ collect_files(base, String::new(), &mut base_files);
+ collect_files(target, String::new(), &mut target_files);
+
+ let all_paths: std::collections::BTreeSet<&String> =
+ base_files.keys().chain(target_files.keys()).collect();
+
+ let mut diff = String::new();
+ for path in all_paths {
+ match (base_files.get(path), target_files.get(path)) {
+ (Some(base_content), Some(target_content))
+ if base_content != target_content =>
+ {
+ diff.push_str(&format!("diff --git a/{path} b/{path}\n"));
+ diff.push_str(&format!("--- a/{path}\n"));
+ diff.push_str(&format!("+++ b/{path}\n"));
+ for line in base_content.lines() {
+ diff.push_str(&format!("-{line}\n"));
+ }
+ for line in target_content.lines() {
+ diff.push_str(&format!("+{line}\n"));
+ }
+ }
+ (Some(_), None) => {
+ diff.push_str(&format!("diff --git a/{path} /dev/null\n"));
+ diff.push_str("deleted file\n");
+ }
+ (None, Some(_)) => {
+ diff.push_str(&format!("diff --git /dev/null b/{path}\n"));
+ diff.push_str("new file\n");
+ }
+ _ => {}
+ }
+ }
+ Ok(diff)
+ }
+ .boxed()
}
fn default_branch(
@@ -1032,6 +1292,24 @@ impl GitRepository for FakeGitRepository {
anyhow::bail!("commit_data_reader not supported for FakeGitRepository")
}
+ fn update_ref(&self, ref_name: String, commit: String) -> BoxFuture<'_, Result<()>> {
+ self.with_state_async(true, move |state| {
+ state.refs.insert(ref_name, commit);
+ Ok(())
+ })
+ }
+
+ fn delete_ref(&self, ref_name: String) -> BoxFuture<'_, Result<()>> {
+ self.with_state_async(true, move |state| {
+ state.refs.remove(&ref_name);
+ Ok(())
+ })
+ }
+
+ fn repair_worktrees(&self) -> BoxFuture<'_, Result<()>> {
+ async { Ok(()) }.boxed()
+ }
+
fn set_trusted(&self, trusted: bool) {
self.is_trusted
.store(trusted, std::sync::atomic::Ordering::Release);
@@ -57,7 +57,7 @@ use collections::{BTreeMap, btree_map};
use fake_git_repo::FakeGitRepositoryState;
#[cfg(feature = "test-support")]
use git::{
- repository::{InitialGraphCommitData, RepoPath, repo_path},
+ repository::{InitialGraphCommitData, RepoPath, Worktree, repo_path},
status::{FileStatus, StatusCode, TrackedStatus, UnmergedStatus},
};
#[cfg(feature = "test-support")]
@@ -1892,11 +1892,15 @@ impl FakeFs {
anyhow::bail!("gitfile points to a non-directory")
};
let common_dir = if let Some(child) = entries.get("commondir") {
- Path::new(
- std::str::from_utf8(child.file_content("commondir".as_ref())?)
- .context("commondir content")?,
- )
- .to_owned()
+ let raw = std::str::from_utf8(child.file_content("commondir".as_ref())?)
+ .context("commondir content")?
+ .trim();
+ let raw_path = Path::new(raw);
+ if raw_path.is_relative() {
+ normalize_path(&canonical_path.join(raw_path))
+ } else {
+ raw_path.to_owned()
+ }
} else {
canonical_path.clone()
};
@@ -1960,6 +1964,116 @@ impl FakeFs {
.unwrap();
}
+ pub async fn add_linked_worktree_for_repo(
+ &self,
+ dot_git: &Path,
+ emit_git_event: bool,
+ worktree: Worktree,
+ ) {
+ let ref_name = worktree
+ .ref_name
+ .as_ref()
+ .expect("linked worktree must have a ref_name");
+ let branch_name = ref_name
+ .strip_prefix("refs/heads/")
+ .unwrap_or(ref_name.as_ref());
+
+ // Create ref in git state.
+ self.with_git_state(dot_git, false, |state| {
+ state
+ .refs
+ .insert(ref_name.to_string(), worktree.sha.to_string());
+ })
+ .unwrap();
+
+ // Create .git/worktrees/<name>/ directory with HEAD, commondir, and gitdir.
+ let worktrees_entry_dir = dot_git.join("worktrees").join(branch_name);
+ self.create_dir(&worktrees_entry_dir).await.unwrap();
+
+ self.write_file_internal(
+ worktrees_entry_dir.join("HEAD"),
+ format!("ref: {ref_name}").into_bytes(),
+ false,
+ )
+ .unwrap();
+
+ self.write_file_internal(
+ worktrees_entry_dir.join("commondir"),
+ dot_git.to_string_lossy().into_owned().into_bytes(),
+ false,
+ )
+ .unwrap();
+
+ let worktree_dot_git = worktree.path.join(".git");
+ self.write_file_internal(
+ worktrees_entry_dir.join("gitdir"),
+ worktree_dot_git.to_string_lossy().into_owned().into_bytes(),
+ false,
+ )
+ .unwrap();
+
+ // Create the worktree checkout directory with a .git file pointing back.
+ self.create_dir(&worktree.path).await.unwrap();
+
+ self.write_file_internal(
+ &worktree_dot_git,
+ format!("gitdir: {}", worktrees_entry_dir.display()).into_bytes(),
+ false,
+ )
+ .unwrap();
+
+ if emit_git_event {
+ self.with_git_state(dot_git, true, |_| {}).unwrap();
+ }
+ }
+
+ pub async fn remove_worktree_for_repo(
+ &self,
+ dot_git: &Path,
+ emit_git_event: bool,
+ ref_name: &str,
+ ) {
+ let branch_name = ref_name.strip_prefix("refs/heads/").unwrap_or(ref_name);
+ let worktrees_entry_dir = dot_git.join("worktrees").join(branch_name);
+
+ // Read gitdir to find the worktree checkout path.
+ let gitdir_content = self
+ .load_internal(worktrees_entry_dir.join("gitdir"))
+ .await
+ .unwrap();
+ let gitdir_str = String::from_utf8(gitdir_content).unwrap();
+ let worktree_path = PathBuf::from(gitdir_str.trim())
+ .parent()
+ .map(PathBuf::from)
+ .unwrap_or_default();
+
+ // Remove the worktree checkout directory.
+ self.remove_dir(
+ &worktree_path,
+ RemoveOptions {
+ recursive: true,
+ ignore_if_not_exists: true,
+ },
+ )
+ .await
+ .unwrap();
+
+ // Remove the .git/worktrees/<name>/ directory.
+ self.remove_dir(
+ &worktrees_entry_dir,
+ RemoveOptions {
+ recursive: true,
+ ignore_if_not_exists: false,
+ },
+ )
+ .await
+ .unwrap();
+
+ if emit_git_event {
+ self.with_git_state(dot_git, true, |_| {}).unwrap();
+ }
+ }
+
pub fn set_unmerged_paths_for_repo(
&self,
dot_git: &Path,
@@ -24,7 +24,7 @@ async fn test_fake_worktree_lifecycle(cx: &mut TestAppContext) {
// Create a worktree
let worktree_1_dir = worktrees_dir.join("feature-branch");
repo.create_worktree(
- "feature-branch".to_string(),
+ Some("feature-branch".to_string()),
worktree_1_dir.clone(),
Some("abc123".to_string()),
)
@@ -47,9 +47,13 @@ async fn test_fake_worktree_lifecycle(cx: &mut TestAppContext) {
// Create a second worktree (without explicit commit)
let worktree_2_dir = worktrees_dir.join("bugfix-branch");
- repo.create_worktree("bugfix-branch".to_string(), worktree_2_dir.clone(), None)
- .await
- .unwrap();
+ repo.create_worktree(
+ Some("bugfix-branch".to_string()),
+ worktree_2_dir.clone(),
+ None,
+ )
+ .await
+ .unwrap();
let worktrees = repo.worktrees().await.unwrap();
assert_eq!(worktrees.len(), 3);
@@ -155,7 +159,10 @@ async fn test_checkpoints(executor: BackgroundExecutor) {
.unwrap()
);
- repository.restore_checkpoint(checkpoint_1).await.unwrap();
+ repository
+ .restore_checkpoint(checkpoint_1.clone())
+ .await
+ .unwrap();
assert_eq!(
fs.files_with_contents(Path::new("")),
[
@@ -164,4 +171,22 @@ async fn test_checkpoints(executor: BackgroundExecutor) {
(Path::new(path!("/foo/b")).into(), b"ipsum".into())
]
);
+
+ // diff_checkpoints: identical checkpoints produce empty diff
+ let diff = repository
+ .diff_checkpoints(checkpoint_2.clone(), checkpoint_3.clone())
+ .await
+ .unwrap();
+ assert!(
+ diff.is_empty(),
+ "identical checkpoints should produce empty diff"
+ );
+
+ // diff_checkpoints: different checkpoints produce non-empty diff
+ let diff = repository
+ .diff_checkpoints(checkpoint_1.clone(), checkpoint_2.clone())
+ .await
+ .unwrap();
+ assert!(diff.contains("b"), "diff should mention changed file 'b'");
+ assert!(diff.contains("c"), "diff should mention added file 'c'");
}
@@ -1,5 +1,9 @@
use std::iter::FromIterator;
+pub fn simple_lowercase(c: char) -> char {
+ c.to_lowercase().next().unwrap_or(c)
+}
+
#[derive(Copy, Clone, Debug, Default, PartialEq, Eq, Hash)]
pub struct CharBag(u64);
@@ -9,7 +13,7 @@ impl CharBag {
}
fn insert(&mut self, c: char) {
- let c = c.to_ascii_lowercase();
+ let c = simple_lowercase(c);
if c.is_ascii_lowercase() {
let mut count = self.0;
let idx = c as u8 - b'a';
@@ -1,10 +1,9 @@
use std::{
borrow::Borrow,
- collections::BTreeMap,
sync::atomic::{self, AtomicBool},
};
-use crate::CharBag;
+use crate::{CharBag, char_bag::simple_lowercase};
const BASE_DISTANCE_PENALTY: f64 = 0.6;
const ADDITIONAL_DISTANCE_PENALTY: f64 = 0.05;
@@ -69,7 +68,6 @@ impl<'a> Matcher<'a> {
{
let mut candidate_chars = Vec::new();
let mut lowercase_candidate_chars = Vec::new();
- let mut extra_lowercase_chars = BTreeMap::new();
for candidate in candidates {
if !candidate.borrow().has_chars(self.query_char_bag) {
@@ -82,14 +80,9 @@ impl<'a> Matcher<'a> {
candidate_chars.clear();
lowercase_candidate_chars.clear();
- extra_lowercase_chars.clear();
- for (i, c) in candidate.borrow().candidate_chars().enumerate() {
+ for c in candidate.borrow().candidate_chars() {
candidate_chars.push(c);
- let mut char_lowercased = c.to_lowercase().collect::<Vec<_>>();
- if char_lowercased.len() > 1 {
- extra_lowercase_chars.insert(i, char_lowercased.len() - 1);
- }
- lowercase_candidate_chars.append(&mut char_lowercased);
+ lowercase_candidate_chars.push(simple_lowercase(c));
}
if !self.find_last_positions(lowercase_prefix, &lowercase_candidate_chars) {
@@ -108,7 +101,6 @@ impl<'a> Matcher<'a> {
&lowercase_candidate_chars,
prefix,
lowercase_prefix,
- &extra_lowercase_chars,
);
if score > 0.0 {
@@ -146,7 +138,6 @@ impl<'a> Matcher<'a> {
path_lowercased: &[char],
prefix: &[char],
lowercase_prefix: &[char],
- extra_lowercase_chars: &BTreeMap<usize, usize>,
) -> f64 {
let score = self.recursive_score_match(
path,
@@ -156,7 +147,6 @@ impl<'a> Matcher<'a> {
0,
0,
self.query.len() as f64,
- extra_lowercase_chars,
) * self.query.len() as f64;
if score <= 0.0 {
@@ -201,7 +191,6 @@ impl<'a> Matcher<'a> {
query_idx: usize,
path_idx: usize,
cur_score: f64,
- extra_lowercase_chars: &BTreeMap<usize, usize>,
) -> f64 {
if query_idx == self.query.len() {
return 1.0;
@@ -228,13 +217,6 @@ impl<'a> Matcher<'a> {
let mut last_slash = 0;
for j in path_idx..=safe_limit {
- let extra_lowercase_chars_count = extra_lowercase_chars
- .iter()
- .take_while(|&(&i, _)| i < j)
- .map(|(_, increment)| increment)
- .sum::<usize>();
- let j_regular = j - extra_lowercase_chars_count;
-
let path_char = if j < prefix.len() {
lowercase_prefix[j]
} else {
@@ -247,20 +229,20 @@ impl<'a> Matcher<'a> {
let is_path_sep = path_char == '/';
if query_idx == 0 && is_path_sep {
- last_slash = j_regular;
+ last_slash = j;
}
let need_to_score = query_char == path_char || (is_path_sep && query_char == '_');
if need_to_score {
- let curr = match prefix.get(j_regular) {
+ let curr = match prefix.get(j) {
Some(&curr) => curr,
- None => path[j_regular - prefix.len()],
+ None => path[j - prefix.len()],
};
let mut char_score = 1.0;
if j > path_idx {
- let last = match prefix.get(j_regular - 1) {
+ let last = match prefix.get(j - 1) {
Some(&last) => last,
- None => path[j_regular - 1 - prefix.len()],
+ None => path[j - 1 - prefix.len()],
};
if last == '/' {
@@ -316,12 +298,11 @@ impl<'a> Matcher<'a> {
query_idx + 1,
j + 1,
next_score,
- extra_lowercase_chars,
) * multiplier;
if new_score > score {
score = new_score;
- best_position = j_regular;
+ best_position = j;
// Optimization: can't score better than 1.
if new_score == 1.0 {
break;
@@ -469,12 +450,12 @@ mod tests {
assert_eq!(
match_single_path_query("İo/oluş", false, &mixed_unicode_paths),
- vec![("İolu/oluş", vec![0, 2, 4, 6, 8, 10, 12])]
+ vec![("İolu/oluş", vec![0, 2, 5, 6, 7, 8, 9])]
);
assert_eq!(
match_single_path_query("İst/code", false, &mixed_unicode_paths),
- vec![("İstanbul/code", vec![0, 2, 4, 6, 8, 10, 12, 14])]
+ vec![("İstanbul/code", vec![0, 2, 3, 9, 10, 11, 12, 13])]
);
assert_eq!(
@@ -536,12 +517,60 @@ mod tests {
);
}
+ #[test]
+ fn test_positions_are_valid_char_boundaries_with_expanding_lowercase() {
+ // İ (U+0130) lowercases to "i\u{307}" (2 chars) under full case folding.
+ // With simple case mapping (used by this matcher), İ → 'i' (1 char),
+ // so positions remain valid byte boundaries.
+ let paths = vec!["İstanbul/code.rs", "aİbİc/dİeİf.txt", "src/İmport/İndex.ts"];
+
+ for query in &["code", "İst", "dİe", "İndex", "İmport", "abcdef"] {
+ let results = match_single_path_query(query, false, &paths);
+ for (path, positions) in &results {
+ for &pos in positions {
+ assert!(
+ path.is_char_boundary(pos),
+ "Position {pos} is not a valid char boundary in path {path:?} \
+ (query: {query:?}, all positions: {positions:?})"
+ );
+ }
+ }
+ }
+ }
+
+ #[test]
+ fn test_positions_valid_with_various_multibyte_chars() {
+ // German ß uppercases to SS but lowercases to itself — no expansion.
+ // Armenian ligatures and other characters that could expand under full
+ // case folding should still produce valid byte boundaries.
+ let paths = vec![
+ "straße/config.rs",
+ "Straße/München/file.txt",
+ "file/path.rs", // fi (U+FB01, fi ligature)
+ "ffoo/bar.txt", // ff (U+FB00, ff ligature)
+ "aÇbŞc/dÖeÜf.txt", // Turkish chars that don't expand
+ ];
+
+ for query in &["config", "Mün", "file", "bar", "abcdef", "straße", "ÇŞ"] {
+ let results = match_single_path_query(query, false, &paths);
+ for (path, positions) in &results {
+ for &pos in positions {
+ assert!(
+ path.is_char_boundary(pos),
+ "Position {pos} is not a valid char boundary in path {path:?} \
+ (query: {query:?}, all positions: {positions:?})"
+ );
+ }
+ }
+ }
+ }
+
fn match_single_path_query<'a>(
query: &str,
smart_case: bool,
paths: &[&'a str],
) -> Vec<(&'a str, Vec<usize>)> {
- let lowercase_query = query.to_lowercase().chars().collect::<Vec<_>>();
+ let lowercase_query = query.chars().map(simple_lowercase).collect::<Vec<_>>();
let query = query.chars().collect::<Vec<_>>();
let query_chars = CharBag::from(&lowercase_query[..]);
@@ -551,7 +580,7 @@ mod tests {
.collect::<Vec<_>>();
let mut path_entries = Vec::new();
for (i, path) in paths.iter().enumerate() {
- let lowercase_path = path.to_lowercase().chars().collect::<Vec<_>>();
+ let lowercase_path: Vec<char> = path.chars().map(simple_lowercase).collect();
let char_bag = CharBag::from(lowercase_path.as_slice());
path_entries.push(PathMatchCandidate {
is_dir: false,
@@ -10,6 +10,7 @@ use util::{paths::PathStyle, rel_path::RelPath};
use crate::{
CharBag,
+ char_bag::simple_lowercase,
matcher::{MatchCandidate, Matcher},
};
@@ -94,7 +95,7 @@ pub fn match_fixed_path_set(
max_results: usize,
path_style: PathStyle,
) -> Vec<PathMatch> {
- let lowercase_query = query.to_lowercase().chars().collect::<Vec<_>>();
+ let lowercase_query = query.chars().map(simple_lowercase).collect::<Vec<_>>();
let query = query.chars().collect::<Vec<_>>();
let query_char_bag = CharBag::from(&lowercase_query[..]);
@@ -110,7 +111,7 @@ pub fn match_fixed_path_set(
path_prefix_chars.extend(path_style.primary_separator().chars());
let lowercase_pfx = path_prefix_chars
.iter()
- .map(|c| c.to_ascii_lowercase())
+ .map(|c| simple_lowercase(*c))
.collect::<Vec<_>>();
(worktree_root_name, path_prefix_chars, lowercase_pfx)
@@ -171,7 +172,7 @@ pub async fn match_path_sets<'a, Set: PathMatchCandidateSet<'a>>(
let lowercase_query = query
.iter()
- .map(|query| query.to_ascii_lowercase())
+ .map(|query| simple_lowercase(*query))
.collect::<Vec<_>>();
let query = &query;
@@ -217,7 +218,7 @@ pub async fn match_path_sets<'a, Set: PathMatchCandidateSet<'a>>(
}
let lowercase_prefix = prefix
.iter()
- .map(|c| c.to_ascii_lowercase())
+ .map(|c| simple_lowercase(*c))
.collect::<Vec<_>>();
matcher.match_candidates(
&prefix,
@@ -1,5 +1,6 @@
use crate::{
CharBag,
+ char_bag::simple_lowercase,
matcher::{MatchCandidate, Matcher},
};
use gpui::BackgroundExecutor;
@@ -141,7 +142,7 @@ where
.collect();
}
- let lowercase_query = query.to_lowercase().chars().collect::<Vec<_>>();
+ let lowercase_query = query.chars().map(simple_lowercase).collect::<Vec<_>>();
let query = query.chars().collect::<Vec<_>>();
let lowercase_query = &lowercase_query;
@@ -329,6 +329,7 @@ impl Upstream {
pub struct CommitOptions {
pub amend: bool,
pub signoff: bool,
+ pub allow_empty: bool,
}
#[derive(Clone, Copy, Debug, Hash, PartialEq, Eq)]
@@ -715,7 +716,7 @@ pub trait GitRepository: Send + Sync {
fn create_worktree(
&self,
- branch_name: String,
+ branch_name: Option<String>,
path: PathBuf,
from_commit: Option<String>,
) -> BoxFuture<'_, Result<()>>;
@@ -916,6 +917,12 @@ pub trait GitRepository: Send + Sync {
fn commit_data_reader(&self) -> Result<CommitDataReader>;
+ fn update_ref(&self, ref_name: String, commit: String) -> BoxFuture<'_, Result<()>>;
+
+ fn delete_ref(&self, ref_name: String) -> BoxFuture<'_, Result<()>>;
+
+ fn repair_worktrees(&self) -> BoxFuture<'_, Result<()>>;
+
fn set_trusted(&self, trusted: bool);
fn is_trusted(&self) -> bool;
}
@@ -1660,19 +1667,20 @@ impl GitRepository for RealGitRepository {
fn create_worktree(
&self,
- branch_name: String,
+ branch_name: Option<String>,
path: PathBuf,
from_commit: Option<String>,
) -> BoxFuture<'_, Result<()>> {
let git_binary = self.git_binary();
- let mut args = vec![
- OsString::from("worktree"),
- OsString::from("add"),
- OsString::from("-b"),
- OsString::from(branch_name.as_str()),
- OsString::from("--"),
- OsString::from(path.as_os_str()),
- ];
+ let mut args = vec![OsString::from("worktree"), OsString::from("add")];
+ if let Some(branch_name) = &branch_name {
+ args.push(OsString::from("-b"));
+ args.push(OsString::from(branch_name.as_str()));
+ } else {
+ args.push(OsString::from("--detach"));
+ }
+ args.push(OsString::from("--"));
+ args.push(OsString::from(path.as_os_str()));
if let Some(from_commit) = from_commit {
args.push(OsString::from(from_commit));
} else {
@@ -2165,6 +2173,10 @@ impl GitRepository for RealGitRepository {
cmd.arg("--signoff");
}
+ if options.allow_empty {
+ cmd.arg("--allow-empty");
+ }
+
if let Some((name, email)) = name_and_email {
cmd.arg("--author").arg(&format!("{name} <{email}>"));
}
@@ -2176,6 +2188,39 @@ impl GitRepository for RealGitRepository {
.boxed()
}
+ fn update_ref(&self, ref_name: String, commit: String) -> BoxFuture<'_, Result<()>> {
+ let git_binary = self.git_binary();
+ self.executor
+ .spawn(async move {
+ let args: Vec<OsString> = vec!["update-ref".into(), ref_name.into(), commit.into()];
+ git_binary?.run(&args).await?;
+ Ok(())
+ })
+ .boxed()
+ }
+
+ fn delete_ref(&self, ref_name: String) -> BoxFuture<'_, Result<()>> {
+ let git_binary = self.git_binary();
+ self.executor
+ .spawn(async move {
+ let args: Vec<OsString> = vec!["update-ref".into(), "-d".into(), ref_name.into()];
+ git_binary?.run(&args).await?;
+ Ok(())
+ })
+ .boxed()
+ }
+
+ fn repair_worktrees(&self) -> BoxFuture<'_, Result<()>> {
+ let git_binary = self.git_binary();
+ self.executor
+ .spawn(async move {
+ let args: Vec<OsString> = vec!["worktree".into(), "repair".into()];
+ git_binary?.run(&args).await?;
+ Ok(())
+ })
+ .boxed()
+ }
+
fn push(
&self,
branch_name: String,
@@ -4009,7 +4054,7 @@ mod tests {
// Create a new worktree
repo.create_worktree(
- "test-branch".to_string(),
+ Some("test-branch".to_string()),
worktree_path.clone(),
Some("HEAD".to_string()),
)
@@ -4068,7 +4113,7 @@ mod tests {
// Create a worktree
let worktree_path = worktrees_dir.join("worktree-to-remove");
repo.create_worktree(
- "to-remove".to_string(),
+ Some("to-remove".to_string()),
worktree_path.clone(),
Some("HEAD".to_string()),
)
@@ -4092,7 +4137,7 @@ mod tests {
// Create a worktree
let worktree_path = worktrees_dir.join("dirty-wt");
repo.create_worktree(
- "dirty-wt".to_string(),
+ Some("dirty-wt".to_string()),
worktree_path.clone(),
Some("HEAD".to_string()),
)
@@ -4162,7 +4207,7 @@ mod tests {
// Create a worktree
let old_path = worktrees_dir.join("old-worktree-name");
repo.create_worktree(
- "old-name".to_string(),
+ Some("old-name".to_string()),
old_path.clone(),
Some("HEAD".to_string()),
)
@@ -24,7 +24,6 @@ anyhow.workspace = true
collections.workspace = true
db.workspace = true
editor.workspace = true
-feature_flags.workspace = true
git.workspace = true
git_ui.workspace = true
gpui.workspace = true
@@ -1,6 +1,5 @@
use collections::{BTreeMap, HashMap, IndexSet};
use editor::Editor;
-use feature_flags::{FeatureFlagAppExt as _, GitGraphFeatureFlag};
use git::{
BuildCommitPermalinkParams, GitHostingProviderRegistry, GitRemote, Oid, ParsedGitRemote,
parse_git_remote_url,
@@ -26,7 +25,7 @@ use project::git_store::{
};
use search::{
SearchOption, SearchOptions, SearchSource, SelectNextMatch, SelectPreviousMatch,
- ToggleCaseSensitive,
+ ToggleCaseSensitive, buffer_search,
};
use settings::Settings;
use smallvec::{SmallVec, smallvec};
@@ -42,8 +41,10 @@ use theme_settings::ThemeSettings;
use time::{OffsetDateTime, UtcOffset, format_description::BorrowedFormatItem};
use ui::{
ButtonLike, Chip, ColumnWidthConfig, CommonAnimationExt as _, ContextMenu, DiffStat, Divider,
- HighlightedLabel, RedistributableColumnsState, ScrollableHandle, Table, TableInteractionState,
- TableResizeBehavior, Tooltip, WithScrollbar, prelude::*,
+ HeaderResizeInfo, HighlightedLabel, RedistributableColumnsState, ScrollableHandle, Table,
+ TableInteractionState, TableRenderContext, TableResizeBehavior, Tooltip, WithScrollbar,
+ bind_redistributable_columns, prelude::*, render_redistributable_columns_resize_handles,
+ render_table_header, table_row::TableRow,
};
use workspace::{
Workspace,
@@ -274,6 +275,8 @@ actions!(
[
/// Opens the commit view for the selected commit.
OpenCommitView,
+ /// Focuses the search field.
+ FocusSearch,
]
);
@@ -730,8 +733,7 @@ pub fn init(cx: &mut App) {
cx.observe_new(|workspace: &mut workspace::Workspace, _, _| {
workspace.register_action_renderer(|div, workspace, _, cx| {
div.when(
- workspace.project().read(cx).active_repository(cx).is_some()
- && cx.has_flag::<GitGraphFeatureFlag>(),
+ workspace.project().read(cx).active_repository(cx).is_some(),
|div| {
let workspace = workspace.weak_handle();
@@ -833,8 +835,8 @@ pub fn init(cx: &mut App) {
.detach();
}
-fn lane_center_x(bounds: Bounds<Pixels>, lane: f32, horizontal_scroll_offset: Pixels) -> Pixels {
- bounds.origin.x + LEFT_PADDING + lane * LANE_WIDTH + LANE_WIDTH / 2.0 - horizontal_scroll_offset
+fn lane_center_x(bounds: Bounds<Pixels>, lane: f32) -> Pixels {
+ bounds.origin.x + LEFT_PADDING + lane * LANE_WIDTH + LANE_WIDTH / 2.0
}
fn to_row_center(
@@ -901,9 +903,7 @@ pub struct GitGraph {
context_menu: Option<(Entity<ContextMenu>, Point<Pixels>, Subscription)>,
row_height: Pixels,
table_interaction_state: Entity<TableInteractionState>,
- table_column_widths: Entity<RedistributableColumnsState>,
- horizontal_scroll_offset: Pixels,
- graph_viewport_width: Pixels,
+ column_widths: Entity<RedistributableColumnsState>,
selected_entry_idx: Option<usize>,
hovered_entry_idx: Option<usize>,
graph_canvas_bounds: Rc<Cell<Option<Bounds<Pixels>>>>,
@@ -933,8 +933,52 @@ impl GitGraph {
font_size + px(12.0)
}
- fn graph_content_width(&self) -> Pixels {
- (LANE_WIDTH * self.graph_data.max_lanes.min(8) as f32) + LEFT_PADDING * 2.0
+ fn graph_canvas_content_width(&self) -> Pixels {
+ (LANE_WIDTH * self.graph_data.max_lanes.max(6) as f32) + LEFT_PADDING * 2.0
+ }
+
+ fn preview_column_fractions(&self, window: &Window, cx: &App) -> [f32; 5] {
+ let fractions = self
+ .column_widths
+ .read(cx)
+ .preview_fractions(window.rem_size());
+ [
+ fractions[0],
+ fractions[1],
+ fractions[2],
+ fractions[3],
+ fractions[4],
+ ]
+ }
+
+ fn table_column_width_config(&self, window: &Window, cx: &App) -> ColumnWidthConfig {
+ let [_, description, date, author, commit] = self.preview_column_fractions(window, cx);
+ let table_total = description + date + author + commit;
+
+ let widths = if table_total > 0.0 {
+ vec![
+ DefiniteLength::Fraction(description / table_total),
+ DefiniteLength::Fraction(date / table_total),
+ DefiniteLength::Fraction(author / table_total),
+ DefiniteLength::Fraction(commit / table_total),
+ ]
+ } else {
+ vec![
+ DefiniteLength::Fraction(0.25),
+ DefiniteLength::Fraction(0.25),
+ DefiniteLength::Fraction(0.25),
+ DefiniteLength::Fraction(0.25),
+ ]
+ };
+
+ ColumnWidthConfig::explicit(widths)
+ }
+
+ fn graph_viewport_width(&self, window: &Window, cx: &App) -> Pixels {
+ self.column_widths
+ .read(cx)
+ .preview_column_width(0, window)
+ .unwrap_or_else(|| self.graph_canvas_content_width())
}
pub fn new(
@@ -972,20 +1016,22 @@ impl GitGraph {
});
let table_interaction_state = cx.new(|cx| TableInteractionState::new(cx));
- let table_column_widths = cx.new(|_cx| {
+ let column_widths = cx.new(|_cx| {
RedistributableColumnsState::new(
- 4,
+ 5,
vec![
- DefiniteLength::Fraction(0.72),
- DefiniteLength::Fraction(0.12),
- DefiniteLength::Fraction(0.10),
- DefiniteLength::Fraction(0.06),
+ DefiniteLength::Fraction(0.14),
+ DefiniteLength::Fraction(0.6192),
+ DefiniteLength::Fraction(0.1032),
+ DefiniteLength::Fraction(0.086),
+ DefiniteLength::Fraction(0.0516),
],
vec![
TableResizeBehavior::Resizable,
TableResizeBehavior::Resizable,
TableResizeBehavior::Resizable,
TableResizeBehavior::Resizable,
+ TableResizeBehavior::Resizable,
],
)
});
@@ -1020,9 +1066,7 @@ impl GitGraph {
context_menu: None,
row_height,
table_interaction_state,
- table_column_widths,
- horizontal_scroll_offset: px(0.),
- graph_viewport_width: px(88.),
+ column_widths,
selected_entry_idx: None,
hovered_entry_idx: None,
graph_canvas_bounds: Rc::new(Cell::new(None)),
@@ -1104,7 +1148,7 @@ impl GitGraph {
}
}
}
- RepositoryEvent::BranchChanged => {
+ RepositoryEvent::HeadChanged | RepositoryEvent::BranchListChanged => {
self.pending_select_sha = None;
// Only invalidate if we scanned atleast once,
// meaning we are not inside the initial repo loading state
@@ -1113,6 +1157,12 @@ impl GitGraph {
self.invalidate_state(cx);
}
}
+ RepositoryEvent::StashEntriesChanged if self.log_source == LogSource::All => {
+ self.pending_select_sha = None;
+ if repository.read(cx).scan_id > 1 {
+ self.invalidate_state(cx);
+ }
+ }
RepositoryEvent::GraphEvent(_, _) => {}
_ => {}
}
@@ -2087,10 +2137,13 @@ impl GitGraph {
let first_visible_row = (scroll_offset_y / row_height).floor() as usize;
let vertical_scroll_offset = scroll_offset_y - (first_visible_row as f32 * row_height);
- let horizontal_scroll_offset = self.horizontal_scroll_offset;
- let max_lanes = self.graph_data.max_lanes.max(6);
- let graph_width = LANE_WIDTH * max_lanes as f32 + LEFT_PADDING * 2.0;
+ let graph_viewport_width = self.graph_viewport_width(window, cx);
+ let graph_width = if self.graph_canvas_content_width() > graph_viewport_width {
+ self.graph_canvas_content_width()
+ } else {
+ graph_viewport_width
+ };
let last_visible_row =
first_visible_row + (viewport_height / row_height).ceil() as usize + 1;
@@ -2158,8 +2211,7 @@ impl GitGraph {
bounds.origin.y + row_idx as f32 * row_height + row_height / 2.0
- vertical_scroll_offset;
- let commit_x =
- lane_center_x(bounds, row.lane as f32, horizontal_scroll_offset);
+ let commit_x = lane_center_x(bounds, row.lane as f32);
draw_commit_circle(commit_x, row_y_center, row_color, window);
}
@@ -2171,8 +2223,7 @@ impl GitGraph {
continue;
};
- let line_x =
- lane_center_x(bounds, start_column as f32, horizontal_scroll_offset);
+ let line_x = lane_center_x(bounds, start_column as f32);
let start_row = line.full_interval.start as i32 - first_visible_row as i32;
@@ -2188,6 +2239,8 @@ impl GitGraph {
builder.move_to(point(line_x, from_y));
let segments = &line.segments[start_segment_idx..];
+ let desired_curve_height = row_height / 3.0;
+ let desired_curve_width = LANE_WIDTH / 3.0;
for (segment_idx, segment) in segments.iter().enumerate() {
let is_last = segment_idx + 1 == segments.len();
@@ -2215,11 +2268,7 @@ impl GitGraph {
on_row,
curve_kind,
} => {
- let mut to_column = lane_center_x(
- bounds,
- *to_column as f32,
- horizontal_scroll_offset,
- );
+ let mut to_column = lane_center_x(bounds, *to_column as f32);
let mut to_row = to_row_center(
*on_row - first_visible_row,
@@ -2241,66 +2290,69 @@ impl GitGraph {
if is_last {
to_column -= column_shift;
}
- builder.move_to(point(current_column, current_row));
- if (to_column - current_column).abs() > LANE_WIDTH {
- // Multi-lane checkout: straight down, small
- // curve turn, then straight horizontal.
- if (to_row - current_row).abs() > row_height {
- let vertical_end =
- point(current_column, to_row - row_height);
- builder.line_to(vertical_end);
- builder.move_to(vertical_end);
- }
-
- let lane_shift = if going_right {
- LANE_WIDTH
- } else {
- -LANE_WIDTH
- };
- let curve_end =
- point(current_column + lane_shift, to_row);
- let curve_control = point(current_column, to_row);
- builder.curve_to(curve_end, curve_control);
- builder.move_to(curve_end);
-
- builder.line_to(point(to_column, to_row));
+ let available_curve_width =
+ (to_column - current_column).abs();
+ let available_curve_height =
+ (to_row - current_row).abs();
+ let curve_width =
+ desired_curve_width.min(available_curve_width);
+ let curve_height =
+ desired_curve_height.min(available_curve_height);
+ let signed_curve_width = if going_right {
+ curve_width
} else {
- if (to_row - current_row).abs() > row_height {
- let start_curve =
- point(current_column, to_row - row_height);
- builder.line_to(start_curve);
- builder.move_to(start_curve);
- }
- let control = point(current_column, to_row);
- builder.curve_to(point(to_column, to_row), control);
- }
+ -curve_width
+ };
+ let curve_start =
+ point(current_column, to_row - curve_height);
+ let curve_end =
+ point(current_column + signed_curve_width, to_row);
+ let curve_control = point(current_column, to_row);
+
+ builder.move_to(point(current_column, current_row));
+ builder.line_to(curve_start);
+ builder.move_to(curve_start);
+ builder.curve_to(curve_end, curve_control);
+ builder.move_to(curve_end);
+ builder.line_to(point(to_column, to_row));
}
CurveKind::Merge => {
if is_last {
to_row -= COMMIT_CIRCLE_RADIUS;
}
- builder.move_to(point(
+
+ let merge_start = point(
current_column + column_shift,
current_row - COMMIT_CIRCLE_RADIUS,
- ));
-
- if (to_column - current_column).abs() > LANE_WIDTH {
- let column_shift = if going_right {
- LANE_WIDTH
- } else {
- -LANE_WIDTH
- };
- let start_curve = point(
- current_column + column_shift,
- current_row - COMMIT_CIRCLE_RADIUS,
- );
- builder.line_to(start_curve);
- builder.move_to(start_curve);
- }
-
- let control = point(to_column, current_row);
- builder.curve_to(point(to_column, to_row), control);
+ );
+ let available_curve_width =
+ (to_column - merge_start.x).abs();
+ let available_curve_height =
+ (to_row - merge_start.y).abs();
+ let curve_width =
+ desired_curve_width.min(available_curve_width);
+ let curve_height =
+ desired_curve_height.min(available_curve_height);
+ let signed_curve_width = if going_right {
+ curve_width
+ } else {
+ -curve_width
+ };
+ let curve_start = point(
+ to_column - signed_curve_width,
+ merge_start.y,
+ );
+ let curve_end =
+ point(to_column, merge_start.y + curve_height);
+ let curve_control = point(to_column, merge_start.y);
+
+ builder.move_to(merge_start);
+ builder.line_to(curve_start);
+ builder.move_to(curve_start);
+ builder.curve_to(curve_end, curve_control);
+ builder.move_to(curve_end);
+ builder.line_to(point(to_column, to_row));
}
}
current_row = to_row;
@@ -2342,9 +2394,8 @@ impl GitGraph {
let local_y = position_y - canvas_bounds.origin.y;
if local_y >= px(0.) && local_y < canvas_bounds.size.height {
- let row_in_viewport = (local_y / self.row_height).floor() as usize;
- let scroll_rows = (scroll_offset_y / self.row_height).floor() as usize;
- let absolute_row = scroll_rows + row_in_viewport;
+ let absolute_y = local_y + scroll_offset_y;
+ let absolute_row = (absolute_y / self.row_height).floor() as usize;
if absolute_row < self.graph_data.commits.len() {
return Some(absolute_row);
@@ -2409,25 +2460,8 @@ impl GitGraph {
let new_y = (current_offset.y + delta.y).clamp(max_vertical_scroll, px(0.));
let new_offset = Point::new(current_offset.x, new_y);
- let max_lanes = self.graph_data.max_lanes.max(1);
- let graph_content_width = LANE_WIDTH * max_lanes as f32 + LEFT_PADDING * 2.0;
- let max_horizontal_scroll = (graph_content_width - self.graph_viewport_width).max(px(0.));
-
- let new_horizontal_offset =
- (self.horizontal_scroll_offset - delta.x).clamp(px(0.), max_horizontal_scroll);
-
- let vertical_changed = new_offset != current_offset;
- let horizontal_changed = new_horizontal_offset != self.horizontal_scroll_offset;
-
- if vertical_changed {
+ if new_offset != current_offset {
table_state.set_scroll_offset(new_offset);
- }
-
- if horizontal_changed {
- self.horizontal_scroll_offset = new_horizontal_offset;
- }
-
- if vertical_changed || horizontal_changed {
cx.notify();
}
}
@@ -2522,118 +2556,200 @@ impl Render for GitGraph {
this.child(self.render_loading_spinner(cx))
})
} else {
- div()
+ let header_resize_info = HeaderResizeInfo::from_state(&self.column_widths, cx);
+ let header_context = TableRenderContext::for_column_widths(
+ Some(self.column_widths.read(cx).widths_to_render()),
+ true,
+ );
+ let [
+ graph_fraction,
+ description_fraction,
+ date_fraction,
+ author_fraction,
+ commit_fraction,
+ ] = self.preview_column_fractions(window, cx);
+ let table_fraction =
+ description_fraction + date_fraction + author_fraction + commit_fraction;
+ let table_width_config = self.table_column_width_config(window, cx);
+
+ h_flex()
.size_full()
- .flex()
- .flex_row()
.child(
div()
- .w(self.graph_content_width())
- .h_full()
+ .flex_1()
+ .min_w_0()
+ .size_full()
.flex()
.flex_col()
- .child(
- div()
- .flex()
- .items_center()
- .px_1()
- .py_0p5()
- .border_b_1()
- .whitespace_nowrap()
- .border_color(cx.theme().colors().border)
- .child(Label::new("Graph").color(Color::Muted)),
- )
- .child(
- div()
- .id("graph-canvas")
- .flex_1()
- .overflow_hidden()
- .child(self.render_graph(window, cx))
- .on_scroll_wheel(cx.listener(Self::handle_graph_scroll))
- .on_mouse_move(cx.listener(Self::handle_graph_mouse_move))
- .on_click(cx.listener(Self::handle_graph_click))
- .on_hover(cx.listener(|this, &is_hovered: &bool, _, cx| {
- if !is_hovered && this.hovered_entry_idx.is_some() {
- this.hovered_entry_idx = None;
- cx.notify();
- }
- })),
- ),
- )
- .child({
- let row_height = self.row_height;
- let selected_entry_idx = self.selected_entry_idx;
- let hovered_entry_idx = self.hovered_entry_idx;
- let weak_self = cx.weak_entity();
- let focus_handle = self.focus_handle.clone();
- div().flex_1().size_full().child(
- Table::new(4)
- .interactable(&self.table_interaction_state)
- .hide_row_borders()
- .hide_row_hover()
- .header(vec![
- Label::new("Description")
- .color(Color::Muted)
- .into_any_element(),
- Label::new("Date").color(Color::Muted).into_any_element(),
- Label::new("Author").color(Color::Muted).into_any_element(),
- Label::new("Commit").color(Color::Muted).into_any_element(),
- ])
- .width_config(ColumnWidthConfig::redistributable(
- self.table_column_widths.clone(),
- ))
- .map_row(move |(index, row), window, cx| {
- let is_selected = selected_entry_idx == Some(index);
- let is_hovered = hovered_entry_idx == Some(index);
- let is_focused = focus_handle.is_focused(window);
- let weak = weak_self.clone();
- let weak_for_hover = weak.clone();
-
- let hover_bg = cx.theme().colors().element_hover.opacity(0.6);
- let selected_bg = if is_focused {
- cx.theme().colors().element_selected
- } else {
- cx.theme().colors().element_hover
- };
-
- row.h(row_height)
- .when(is_selected, |row| row.bg(selected_bg))
- .when(is_hovered && !is_selected, |row| row.bg(hover_bg))
- .on_hover(move |&is_hovered, _, cx| {
- weak_for_hover
- .update(cx, |this, cx| {
- if is_hovered {
- if this.hovered_entry_idx != Some(index) {
- this.hovered_entry_idx = Some(index);
- cx.notify();
- }
- } else if this.hovered_entry_idx == Some(index) {
- // Only clear if this row was the hovered one
- this.hovered_entry_idx = None;
- cx.notify();
- }
- })
- .ok();
- })
- .on_click(move |event, window, cx| {
- let click_count = event.click_count();
- weak.update(cx, |this, cx| {
- this.select_entry(index, ScrollStrategy::Center, cx);
- if click_count >= 2 {
- this.open_commit_view(index, window, cx);
- }
- })
- .ok();
- })
- .into_any_element()
- })
- .uniform_list(
- "git-graph-commits",
- commit_count,
- cx.processor(Self::render_table_rows),
+ .child(render_table_header(
+ TableRow::from_vec(
+ vec![
+ Label::new("Graph")
+ .color(Color::Muted)
+ .truncate()
+ .into_any_element(),
+ Label::new("Description")
+ .color(Color::Muted)
+ .into_any_element(),
+ Label::new("Date").color(Color::Muted).into_any_element(),
+ Label::new("Author").color(Color::Muted).into_any_element(),
+ Label::new("Commit").color(Color::Muted).into_any_element(),
+ ],
+ 5,
),
- )
- })
+ header_context,
+ Some(header_resize_info),
+ Some(self.column_widths.entity_id()),
+ cx,
+ ))
+ .child({
+ let row_height = self.row_height;
+ let selected_entry_idx = self.selected_entry_idx;
+ let hovered_entry_idx = self.hovered_entry_idx;
+ let weak_self = cx.weak_entity();
+ let focus_handle = self.focus_handle.clone();
+
+ bind_redistributable_columns(
+ div()
+ .relative()
+ .flex_1()
+ .w_full()
+ .overflow_hidden()
+ .child(
+ h_flex()
+ .size_full()
+ .child(
+ div()
+ .w(DefiniteLength::Fraction(graph_fraction))
+ .h_full()
+ .min_w_0()
+ .overflow_hidden()
+ .child(
+ div()
+ .id("graph-canvas")
+ .size_full()
+ .overflow_hidden()
+ .child(
+ div()
+ .size_full()
+ .child(self.render_graph(window, cx)),
+ )
+ .on_scroll_wheel(
+ cx.listener(Self::handle_graph_scroll),
+ )
+ .on_mouse_move(
+ cx.listener(Self::handle_graph_mouse_move),
+ )
+ .on_click(cx.listener(Self::handle_graph_click))
+ .on_hover(cx.listener(
+ |this, &is_hovered: &bool, _, cx| {
+ if !is_hovered
+ && this.hovered_entry_idx.is_some()
+ {
+ this.hovered_entry_idx = None;
+ cx.notify();
+ }
+ },
+ )),
+ ),
+ )
+ .child(
+ div()
+ .w(DefiniteLength::Fraction(table_fraction))
+ .h_full()
+ .min_w_0()
+ .child(
+ Table::new(4)
+ .interactable(&self.table_interaction_state)
+ .hide_row_borders()
+ .hide_row_hover()
+ .width_config(table_width_config)
+ .map_row(move |(index, row), window, cx| {
+ let is_selected =
+ selected_entry_idx == Some(index);
+ let is_hovered =
+ hovered_entry_idx == Some(index);
+ let is_focused =
+ focus_handle.is_focused(window);
+ let weak = weak_self.clone();
+ let weak_for_hover = weak.clone();
+
+ let hover_bg = cx
+ .theme()
+ .colors()
+ .element_hover
+ .opacity(0.6);
+ let selected_bg = if is_focused {
+ cx.theme().colors().element_selected
+ } else {
+ cx.theme().colors().element_hover
+ };
+
+ row.h(row_height)
+ .when(is_selected, |row| row.bg(selected_bg))
+ .when(
+ is_hovered && !is_selected,
+ |row| row.bg(hover_bg),
+ )
+ .on_hover(move |&is_hovered, _, cx| {
+ weak_for_hover
+ .update(cx, |this, cx| {
+ if is_hovered {
+ if this.hovered_entry_idx
+ != Some(index)
+ {
+ this.hovered_entry_idx =
+ Some(index);
+ cx.notify();
+ }
+ } else if this
+ .hovered_entry_idx
+ == Some(index)
+ {
+ this.hovered_entry_idx =
+ None;
+ cx.notify();
+ }
+ })
+ .ok();
+ })
+ .on_click(move |event, window, cx| {
+ let click_count = event.click_count();
+ weak.update(cx, |this, cx| {
+ this.select_entry(
+ index,
+ ScrollStrategy::Center,
+ cx,
+ );
+ if click_count >= 2 {
+ this.open_commit_view(
+ index,
+ window,
+ cx,
+ );
+ }
+ })
+ .ok();
+ })
+ .into_any_element()
+ })
+ .uniform_list(
+ "git-graph-commits",
+ commit_count,
+ cx.processor(Self::render_table_rows),
+ ),
+ ),
+ ),
+ )
+ .child(render_redistributable_columns_resize_handles(
+ &self.column_widths,
+ window,
+ cx,
+ )),
+ self.column_widths.clone(),
+ )
+ }),
+ )
.on_drag_move::<DraggedSplitHandle>(cx.listener(|this, event, window, cx| {
this.commit_details_split_state.update(cx, |state, cx| {
state.on_drag_move(event, window, cx);
@@ -2659,6 +2775,11 @@ impl Render for GitGraph {
this.open_selected_commit_view(window, cx);
}))
.on_action(cx.listener(Self::cancel))
+ .on_action(cx.listener(|this, _: &FocusSearch, window, cx| {
+ this.search_state
+ .editor
+ .update(cx, |editor, cx| editor.focus_handle(cx).focus(window, cx));
+ }))
.on_action(cx.listener(Self::select_first))
.on_action(cx.listener(Self::select_prev))
.on_action(cx.listener(Self::select_next))
@@ -2690,6 +2811,10 @@ impl Render for GitGraph {
)
.with_priority(1)
}))
+ .on_action(cx.listener(|_, _: &buffer_search::Deploy, window, cx| {
+ window.dispatch_action(Box::new(FocusSearch), cx);
+ cx.stop_propagation();
+ }))
}
}
@@ -3617,8 +3742,8 @@ mod tests {
assert!(
observed_repository_events
.iter()
- .any(|event| matches!(event, RepositoryEvent::BranchChanged)),
- "initial repository scan should emit BranchChanged"
+ .any(|event| matches!(event, RepositoryEvent::HeadChanged)),
+ "initial repository scan should emit HeadChanged"
);
let commit_count_after = repository.read_with(cx, |repo, _| {
repo.get_graph_data(crate::LogSource::default(), crate::LogOrder::default())
@@ -3729,16 +3854,227 @@ mod tests {
});
cx.run_until_parked();
- git_graph.update_in(&mut *cx, |this, window, cx| {
- this.render(window, cx);
- });
+ cx.draw(
+ point(px(0.), px(0.)),
+ gpui::size(px(1200.), px(800.)),
+ |_, _| git_graph.clone().into_any_element(),
+ );
cx.run_until_parked();
- let commit_count_after_switch_back =
+ // Verify graph data is reloaded from repository cache on switch back
+ let reloaded_commit_count =
git_graph.read_with(&*cx, |graph, _| graph.graph_data.commits.len());
assert_eq!(
- initial_commit_count, commit_count_after_switch_back,
- "graph_data should be repopulated from cache after switching back to the same repo"
+ reloaded_commit_count,
+ commits.len(),
+ "graph data should be reloaded after switching back"
+ );
+ }
+
+ #[gpui::test]
+ async fn test_graph_data_reloaded_after_stash_change(cx: &mut TestAppContext) {
+ init_test(cx);
+
+ let fs = FakeFs::new(cx.executor());
+ fs.insert_tree(
+ Path::new("/project"),
+ json!({
+ ".git": {},
+ "file.txt": "content",
+ }),
+ )
+ .await;
+
+ let initial_head = Oid::from_bytes(&[1; 20]).unwrap();
+ let initial_stash = Oid::from_bytes(&[2; 20]).unwrap();
+ let updated_head = Oid::from_bytes(&[3; 20]).unwrap();
+ let updated_stash = Oid::from_bytes(&[4; 20]).unwrap();
+
+ fs.set_graph_commits(
+ Path::new("/project/.git"),
+ vec![
+ Arc::new(InitialGraphCommitData {
+ sha: initial_head,
+ parents: smallvec![initial_stash],
+ ref_names: vec!["HEAD".into(), "refs/heads/main".into()],
+ }),
+ Arc::new(InitialGraphCommitData {
+ sha: initial_stash,
+ parents: smallvec![],
+ ref_names: vec!["refs/stash".into()],
+ }),
+ ],
+ );
+ fs.with_git_state(Path::new("/project/.git"), true, |state| {
+ state.stash_entries = git::stash::GitStash {
+ entries: vec![git::stash::StashEntry {
+ index: 0,
+ oid: initial_stash,
+ message: "initial stash".to_string(),
+ branch: Some("main".to_string()),
+ timestamp: 1,
+ }]
+ .into(),
+ };
+ })
+ .unwrap();
+
+ let project = Project::test(fs.clone(), [Path::new("/project")], cx).await;
+ cx.run_until_parked();
+
+ let repository = project.read_with(cx, |project, cx| {
+ project
+ .active_repository(cx)
+ .expect("should have a repository")
+ });
+
+ let (multi_workspace, cx) = cx.add_window_view(|window, cx| {
+ workspace::MultiWorkspace::test_new(project.clone(), window, cx)
+ });
+ let workspace_weak =
+ multi_workspace.read_with(&*cx, |multi, _| multi.workspace().downgrade());
+ let git_graph = cx.new_window_entity(|window, cx| {
+ GitGraph::new(
+ repository.read(cx).id,
+ project.read(cx).git_store().clone(),
+ workspace_weak,
+ window,
+ cx,
+ )
+ });
+ cx.run_until_parked();
+
+ let initial_shas = git_graph.read_with(&*cx, |graph, _| {
+ graph
+ .graph_data
+ .commits
+ .iter()
+ .map(|commit| commit.data.sha)
+ .collect::<Vec<_>>()
+ });
+ assert_eq!(initial_shas, vec![initial_head, initial_stash]);
+
+ fs.set_graph_commits(
+ Path::new("/project/.git"),
+ vec![
+ Arc::new(InitialGraphCommitData {
+ sha: updated_head,
+ parents: smallvec![updated_stash],
+ ref_names: vec!["HEAD".into(), "refs/heads/main".into()],
+ }),
+ Arc::new(InitialGraphCommitData {
+ sha: updated_stash,
+ parents: smallvec![],
+ ref_names: vec!["refs/stash".into()],
+ }),
+ ],
+ );
+ fs.with_git_state(Path::new("/project/.git"), true, |state| {
+ state.stash_entries = git::stash::GitStash {
+ entries: vec![git::stash::StashEntry {
+ index: 0,
+ oid: updated_stash,
+ message: "updated stash".to_string(),
+ branch: Some("main".to_string()),
+ timestamp: 1,
+ }]
+ .into(),
+ };
+ })
+ .unwrap();
+
+ project
+ .update(cx, |project, cx| project.git_scans_complete(cx))
+ .await;
+ cx.run_until_parked();
+
+ cx.draw(
+ point(px(0.), px(0.)),
+ gpui::size(px(1200.), px(800.)),
+ |_, _| git_graph.clone().into_any_element(),
);
+ cx.run_until_parked();
+
+ let reloaded_shas = git_graph.read_with(&*cx, |graph, _| {
+ graph
+ .graph_data
+ .commits
+ .iter()
+ .map(|commit| commit.data.sha)
+ .collect::<Vec<_>>()
+ });
+ assert_eq!(reloaded_shas, vec![updated_head, updated_stash]);
+ }
+
+ #[gpui::test]
+ async fn test_git_graph_row_at_position_rounding(cx: &mut TestAppContext) {
+ init_test(cx);
+
+ let fs = FakeFs::new(cx.executor());
+ fs.insert_tree(
+ Path::new("/project"),
+ serde_json::json!({
+ ".git": {},
+ "file.txt": "content",
+ }),
+ )
+ .await;
+
+ let mut rng = StdRng::seed_from_u64(42);
+ let commits = generate_random_commit_dag(&mut rng, 10, false);
+ fs.set_graph_commits(Path::new("/project/.git"), commits.clone());
+
+ let project = Project::test(fs.clone(), [Path::new("/project")], cx).await;
+ cx.run_until_parked();
+
+ let repository = project.read_with(cx, |project, cx| {
+ project
+ .active_repository(cx)
+ .expect("should have a repository")
+ });
+
+ let (multi_workspace, cx) = cx.add_window_view(|window, cx| {
+ workspace::MultiWorkspace::test_new(project.clone(), window, cx)
+ });
+
+ let workspace_weak =
+ multi_workspace.read_with(&*cx, |multi, _| multi.workspace().downgrade());
+
+ let git_graph = cx.new_window_entity(|window, cx| {
+ GitGraph::new(
+ repository.read(cx).id,
+ project.read(cx).git_store().clone(),
+ workspace_weak,
+ window,
+ cx,
+ )
+ });
+ cx.run_until_parked();
+
+ git_graph.update(cx, |graph, cx| {
+ assert!(
+ graph.graph_data.commits.len() >= 10,
+ "graph should load dummy commits"
+ );
+
+ graph.row_height = px(20.0);
+ let origin_y = px(100.0);
+ graph.graph_canvas_bounds.set(Some(Bounds {
+ origin: point(px(0.0), origin_y),
+ size: gpui::size(px(100.0), px(1000.0)),
+ }));
+
+ graph.table_interaction_state.update(cx, |state, _| {
+ state.set_scroll_offset(point(px(0.0), px(-15.0)))
+ });
+ let pos_y = origin_y + px(10.0);
+ let absolute_calc_row = graph.row_at_position(pos_y, cx);
+
+ assert_eq!(
+ absolute_calc_row,
+ Some(1),
+ "Row calculation should yield absolute row exactly"
+ );
+ });
}
}
@@ -27,7 +27,6 @@ db.workspace = true
editor.workspace = true
file_icons.workspace = true
futures.workspace = true
-feature_flags.workspace = true
fuzzy.workspace = true
git.workspace = true
gpui.workspace = true
@@ -453,6 +453,7 @@ impl CommitModal {
CommitOptions {
amend: is_amend_pending,
signoff: is_signoff_enabled,
+ allow_empty: false,
},
window,
cx,
@@ -3,7 +3,6 @@ use buffer_diff::BufferDiff;
use collections::HashMap;
use editor::display_map::{BlockPlacement, BlockProperties, BlockStyle};
use editor::{Addon, Editor, EditorEvent, ExcerptRange, MultiBuffer, multibuffer_context_lines};
-use feature_flags::{FeatureFlagAppExt as _, GitGraphFeatureFlag};
use git::repository::{CommitDetails, CommitDiff, RepoPath, is_binary_content};
use git::status::{FileStatus, StatusCode, TrackedStatus};
use git::{
@@ -212,7 +211,7 @@ impl CommitView {
editor.insert_blocks(
[BlockProperties {
- placement: BlockPlacement::Above(editor::Anchor::min()),
+ placement: BlockPlacement::Above(editor::Anchor::Min),
height: Some(1),
style: BlockStyle::Sticky,
render: Arc::new(|_| gpui::Empty.into_any_element()),
@@ -223,7 +222,10 @@ impl CommitView {
editor
.buffer()
.read(cx)
- .buffer_anchor_to_anchor(&message_buffer, Anchor::MAX, cx)
+ .snapshot(cx)
+ .anchor_in_buffer(Anchor::max_for_buffer(
+ message_buffer.read(cx).remote_id(),
+ ))
.map(|anchor| BlockProperties {
placement: BlockPlacement::Below(anchor),
height: Some(1),
@@ -1042,21 +1044,19 @@ impl Render for CommitViewToolbar {
}),
)
.when(!is_stash, |this| {
- this.when(cx.has_flag::<GitGraphFeatureFlag>(), |this| {
- this.child(
- IconButton::new("show-in-git-graph", IconName::GitGraph)
- .icon_size(IconSize::Small)
- .tooltip(Tooltip::text("Show in Git Graph"))
- .on_click(move |_, window, cx| {
- window.dispatch_action(
- Box::new(crate::git_panel::OpenAtCommit {
- sha: sha_for_graph.clone(),
- }),
- cx,
- );
- }),
- )
- })
+ this.child(
+ IconButton::new("show-in-git-graph", IconName::GitGraph)
+ .icon_size(IconSize::Small)
+ .tooltip(Tooltip::text("Show in Git Graph"))
+ .on_click(move |_, window, cx| {
+ window.dispatch_action(
+ Box::new(crate::git_panel::OpenAtCommit {
+ sha: sha_for_graph.clone(),
+ }),
+ cx,
+ );
+ }),
+ )
.children(remote_info.map(|(provider_name, url)| {
let icon = match provider_name.as_str() {
"GitHub" => IconName::Github,
@@ -2,23 +2,23 @@ use agent_settings::AgentSettings;
use collections::{HashMap, HashSet};
use editor::{
ConflictsOurs, ConflictsOursMarker, ConflictsOuter, ConflictsTheirs, ConflictsTheirsMarker,
- Editor, EditorEvent, ExcerptId, MultiBuffer, RowHighlightOptions,
+ Editor, EditorEvent, MultiBuffer, RowHighlightOptions,
display_map::{BlockContext, BlockPlacement, BlockProperties, BlockStyle, CustomBlockId},
};
use gpui::{
- App, Context, DismissEvent, Entity, InteractiveElement as _, ParentElement as _, Subscription,
- Task, WeakEntity,
+ App, ClickEvent, Context, Empty, Entity, InteractiveElement as _, ParentElement as _,
+ Subscription, Task, WeakEntity,
};
use language::{Anchor, Buffer, BufferId};
use project::{
ConflictRegion, ConflictSet, ConflictSetUpdate, Project, ProjectItem as _,
- git_store::{GitStoreEvent, RepositoryEvent},
+ git_store::{GitStore, GitStoreEvent, RepositoryEvent},
};
use settings::Settings;
-use std::{cell::RefCell, ops::Range, rc::Rc, sync::Arc};
-use ui::{ActiveTheme, Divider, Element as _, Styled, Window, prelude::*};
+use std::{ops::Range, sync::Arc};
+use ui::{ButtonLike, Divider, Tooltip, prelude::*};
use util::{ResultExt as _, debug_panic, maybe};
-use workspace::{Workspace, notifications::simple_message_notification::MessageNotification};
+use workspace::{StatusItemView, Workspace, item::ItemHandle};
use zed_actions::agent::{
ConflictContent, ResolveConflictedFilesWithAgent, ResolveConflictsWithAgent,
};
@@ -67,62 +67,22 @@ pub fn register_editor(editor: &mut Editor, buffer: Entity<MultiBuffer>, cx: &mu
let buffers = buffer.read(cx).all_buffers();
for buffer in buffers {
- buffer_added(editor, buffer, cx);
+ buffer_ranges_updated(editor, buffer, cx);
}
cx.subscribe(&cx.entity(), |editor, _, event, cx| match event {
- EditorEvent::ExcerptsAdded { buffer, .. } => buffer_added(editor, buffer.clone(), cx),
- EditorEvent::ExcerptsExpanded { ids } => {
- let multibuffer = editor.buffer().read(cx).snapshot(cx);
- for excerpt_id in ids {
- let Some(buffer) = multibuffer.buffer_for_excerpt(*excerpt_id) else {
- continue;
- };
- let addon = editor.addon::<ConflictAddon>().unwrap();
- let Some(conflict_set) = addon.conflict_set(buffer.remote_id()).clone() else {
- return;
- };
- excerpt_for_buffer_updated(editor, conflict_set, cx);
- }
+ EditorEvent::BufferRangesUpdated { buffer, .. } => {
+ buffer_ranges_updated(editor, buffer.clone(), cx)
+ }
+ EditorEvent::BuffersRemoved { removed_buffer_ids } => {
+ buffers_removed(editor, removed_buffer_ids, cx)
}
- EditorEvent::ExcerptsRemoved {
- removed_buffer_ids, ..
- } => buffers_removed(editor, removed_buffer_ids, cx),
_ => {}
})
.detach();
}
-fn excerpt_for_buffer_updated(
- editor: &mut Editor,
- conflict_set: Entity<ConflictSet>,
- cx: &mut Context<Editor>,
-) {
- let conflicts_len = conflict_set.read(cx).snapshot().conflicts.len();
- let buffer_id = conflict_set.read(cx).snapshot().buffer_id;
- let Some(buffer_conflicts) = editor
- .addon_mut::<ConflictAddon>()
- .unwrap()
- .buffers
- .get(&buffer_id)
- else {
- return;
- };
- let addon_conflicts_len = buffer_conflicts.block_ids.len();
- conflicts_updated(
- editor,
- conflict_set,
- &ConflictSetUpdate {
- buffer_range: None,
- old_range: 0..addon_conflicts_len,
- new_range: 0..conflicts_len,
- },
- cx,
- );
-}
-
-#[ztracing::instrument(skip_all)]
-fn buffer_added(editor: &mut Editor, buffer: Entity<Buffer>, cx: &mut Context<Editor>) {
+fn buffer_ranges_updated(editor: &mut Editor, buffer: Entity<Buffer>, cx: &mut Context<Editor>) {
let Some(project) = editor.project() else {
return;
};
@@ -188,14 +148,6 @@ fn conflicts_updated(
let conflict_set = conflict_set.read(cx).snapshot();
let multibuffer = editor.buffer().read(cx);
let snapshot = multibuffer.snapshot(cx);
- let excerpts = multibuffer.excerpts_for_buffer(buffer_id, cx);
- let Some(buffer_snapshot) = excerpts
- .first()
- .and_then(|(excerpt_id, _, _)| snapshot.buffer_for_excerpt(*excerpt_id))
- else {
- return;
- };
-
let old_range = maybe!({
let conflict_addon = editor.addon_mut::<ConflictAddon>().unwrap();
let buffer_conflicts = conflict_addon.buffers.get(&buffer_id)?;
@@ -230,23 +182,7 @@ fn conflicts_updated(
let mut removed_highlighted_ranges = Vec::new();
let mut removed_block_ids = HashSet::default();
for (conflict_range, block_id) in old_conflicts {
- let Some((excerpt_id, _, _)) = excerpts.iter().find(|(_, _, range)| {
- let precedes_start = range
- .context
- .start
- .cmp(&conflict_range.start, buffer_snapshot)
- .is_le();
- let follows_end = range
- .context
- .end
- .cmp(&conflict_range.start, buffer_snapshot)
- .is_ge();
- precedes_start && follows_end
- }) else {
- continue;
- };
- let excerpt_id = *excerpt_id;
- let Some(range) = snapshot.anchor_range_in_excerpt(excerpt_id, conflict_range) else {
+ let Some(range) = snapshot.buffer_anchor_range_to_anchor_range(conflict_range) else {
continue;
};
removed_highlighted_ranges.push(range.clone());
@@ -272,26 +208,9 @@ fn conflicts_updated(
let new_conflicts = &conflict_set.conflicts[event.new_range.clone()];
let mut blocks = Vec::new();
for conflict in new_conflicts {
- let Some((excerpt_id, _, _)) = excerpts.iter().find(|(_, _, range)| {
- let precedes_start = range
- .context
- .start
- .cmp(&conflict.range.start, buffer_snapshot)
- .is_le();
- let follows_end = range
- .context
- .end
- .cmp(&conflict.range.start, buffer_snapshot)
- .is_ge();
- precedes_start && follows_end
- }) else {
- continue;
- };
- let excerpt_id = *excerpt_id;
+ update_conflict_highlighting(editor, conflict, &snapshot, cx);
- update_conflict_highlighting(editor, conflict, &snapshot, excerpt_id, cx);
-
- let Some(anchor) = snapshot.anchor_in_excerpt(excerpt_id, conflict.range.start) else {
+ let Some(anchor) = snapshot.anchor_in_excerpt(conflict.range.start) else {
continue;
};
@@ -302,7 +221,7 @@ fn conflicts_updated(
style: BlockStyle::Sticky,
render: Arc::new({
let conflict = conflict.clone();
- move |cx| render_conflict_buttons(&conflict, excerpt_id, editor_handle.clone(), cx)
+ move |cx| render_conflict_buttons(&conflict, editor_handle.clone(), cx)
}),
priority: 0,
})
@@ -328,14 +247,13 @@ fn update_conflict_highlighting(
editor: &mut Editor,
conflict: &ConflictRegion,
buffer: &editor::MultiBufferSnapshot,
- excerpt_id: editor::ExcerptId,
cx: &mut Context<Editor>,
) -> Option<()> {
log::debug!("update conflict highlighting for {conflict:?}");
- let outer = buffer.anchor_range_in_excerpt(excerpt_id, conflict.range.clone())?;
- let ours = buffer.anchor_range_in_excerpt(excerpt_id, conflict.ours.clone())?;
- let theirs = buffer.anchor_range_in_excerpt(excerpt_id, conflict.theirs.clone())?;
+ let outer = buffer.buffer_anchor_range_to_anchor_range(conflict.range.clone())?;
+ let ours = buffer.buffer_anchor_range_to_anchor_range(conflict.ours.clone())?;
+ let theirs = buffer.buffer_anchor_range_to_anchor_range(conflict.theirs.clone())?;
let ours_background = cx.theme().colors().version_control_conflict_marker_ours;
let theirs_background = cx.theme().colors().version_control_conflict_marker_theirs;
@@ -373,7 +291,6 @@ fn update_conflict_highlighting(
fn render_conflict_buttons(
conflict: &ConflictRegion,
- excerpt_id: ExcerptId,
editor: WeakEntity<Editor>,
cx: &mut BlockContext,
) -> AnyElement {
@@ -395,7 +312,6 @@ fn render_conflict_buttons(
move |_, window, cx| {
resolve_conflict(
editor.clone(),
- excerpt_id,
conflict.clone(),
vec![ours.clone()],
window,
@@ -415,7 +331,6 @@ fn render_conflict_buttons(
move |_, window, cx| {
resolve_conflict(
editor.clone(),
- excerpt_id,
conflict.clone(),
vec![theirs.clone()],
window,
@@ -436,7 +351,6 @@ fn render_conflict_buttons(
move |_, window, cx| {
resolve_conflict(
editor.clone(),
- excerpt_id,
conflict.clone(),
vec![ours.clone(), theirs.clone()],
window,
@@ -461,7 +375,7 @@ fn render_conflict_buttons(
let content = editor
.update(cx, |editor, cx| {
let multibuffer = editor.buffer().read(cx);
- let buffer_id = conflict.ours.end.buffer_id?;
+ let buffer_id = conflict.ours.end.buffer_id;
let buffer = multibuffer.buffer(buffer_id)?;
let buffer_read = buffer.read(cx);
let snapshot = buffer_read.snapshot();
@@ -519,77 +433,8 @@ fn collect_conflicted_file_paths(project: &Project, cx: &App) -> Vec<String> {
paths
}
-pub(crate) fn register_conflict_notification(
- workspace: &mut Workspace,
- cx: &mut Context<Workspace>,
-) {
- let git_store = workspace.project().read(cx).git_store().clone();
-
- let last_shown_paths: Rc<RefCell<HashSet<String>>> = Rc::new(RefCell::new(HashSet::default()));
-
- cx.subscribe(&git_store, move |workspace, _git_store, event, cx| {
- let conflicts_changed = matches!(
- event,
- GitStoreEvent::ConflictsUpdated
- | GitStoreEvent::RepositoryUpdated(_, RepositoryEvent::StatusesChanged, _)
- );
- if !AgentSettings::get_global(cx).enabled(cx) || !conflicts_changed {
- return;
- }
- let project = workspace.project().read(cx);
- if project.is_via_collab() {
- return;
- }
-
- if workspace.is_notification_suppressed(workspace::merge_conflict_notification_id()) {
- return;
- }
-
- let paths = collect_conflicted_file_paths(project, cx);
- let notification_id = workspace::merge_conflict_notification_id();
- let current_paths_set: HashSet<String> = paths.iter().cloned().collect();
-
- if paths.is_empty() {
- last_shown_paths.borrow_mut().clear();
- workspace.dismiss_notification(¬ification_id, cx);
- } else if *last_shown_paths.borrow() != current_paths_set {
- // Only show the notification if the set of conflicted paths has changed.
- // This prevents re-showing after the user dismisses it while working on the same conflicts.
- *last_shown_paths.borrow_mut() = current_paths_set;
- let file_count = paths.len();
- workspace.show_notification(notification_id, cx, |cx| {
- cx.new(|cx| {
- let message = format!(
- "{file_count} file{} have unresolved merge conflicts",
- if file_count == 1 { "" } else { "s" }
- );
-
- MessageNotification::new(message, cx)
- .primary_message("Resolve with Agent")
- .primary_icon(IconName::ZedAssistant)
- .primary_icon_color(Color::Muted)
- .primary_on_click({
- let paths = paths.clone();
- move |window, cx| {
- window.dispatch_action(
- Box::new(ResolveConflictedFilesWithAgent {
- conflicted_file_paths: paths.clone(),
- }),
- cx,
- );
- cx.emit(DismissEvent);
- }
- })
- })
- });
- }
- })
- .detach();
-}
-
pub(crate) fn resolve_conflict(
editor: WeakEntity<Editor>,
- excerpt_id: ExcerptId,
resolved_conflict: ConflictRegion,
ranges: Vec<Range<Anchor>>,
window: &mut Window,
@@ -601,7 +446,7 @@ pub(crate) fn resolve_conflict(
let workspace = editor.workspace()?;
let project = editor.project()?.clone();
let multibuffer = editor.buffer().clone();
- let buffer_id = resolved_conflict.ours.end.buffer_id?;
+ let buffer_id = resolved_conflict.ours.end.buffer_id;
let buffer = multibuffer.read(cx).buffer(buffer_id)?;
resolved_conflict.resolve(buffer.clone(), &ranges, cx);
let conflict_addon = editor.addon_mut::<ConflictAddon>().unwrap();
@@ -620,7 +465,7 @@ pub(crate) fn resolve_conflict(
.ok()?;
let &(_, block_id) = &state.block_ids[ix];
let range =
- snapshot.anchor_range_in_excerpt(excerpt_id, resolved_conflict.range)?;
+ snapshot.buffer_anchor_range_to_anchor_range(resolved_conflict.range)?;
editor.remove_gutter_highlights::<ConflictsOuter>(vec![range.clone()], cx);
@@ -660,3 +505,171 @@ pub(crate) fn resolve_conflict(
}
})
}
+
+pub struct MergeConflictIndicator {
+ project: Entity<Project>,
+ conflicted_paths: Vec<String>,
+ last_shown_paths: HashSet<String>,
+ dismissed: bool,
+ _subscription: Subscription,
+}
+
+impl MergeConflictIndicator {
+ pub fn new(workspace: &Workspace, cx: &mut Context<Self>) -> Self {
+ let project = workspace.project().clone();
+ let git_store = project.read(cx).git_store().clone();
+
+ let subscription = cx.subscribe(&git_store, Self::on_git_store_event);
+
+ let conflicted_paths = collect_conflicted_file_paths(project.read(cx), cx);
+ let last_shown_paths: HashSet<String> = conflicted_paths.iter().cloned().collect();
+
+ Self {
+ project,
+ conflicted_paths,
+ last_shown_paths,
+ dismissed: false,
+ _subscription: subscription,
+ }
+ }
+
+ fn on_git_store_event(
+ &mut self,
+ _git_store: Entity<GitStore>,
+ event: &GitStoreEvent,
+ cx: &mut Context<Self>,
+ ) {
+ let conflicts_changed = matches!(
+ event,
+ GitStoreEvent::ConflictsUpdated
+ | GitStoreEvent::RepositoryUpdated(_, RepositoryEvent::StatusesChanged, _)
+ );
+
+ let agent_settings = AgentSettings::get_global(cx);
+ if !agent_settings.enabled(cx)
+ || !agent_settings.show_merge_conflict_indicator
+ || !conflicts_changed
+ {
+ return;
+ }
+
+ let project = self.project.read(cx);
+ if project.is_via_collab() {
+ return;
+ }
+
+ let paths = collect_conflicted_file_paths(project, cx);
+ let current_paths_set: HashSet<String> = paths.iter().cloned().collect();
+
+ if paths.is_empty() {
+ self.conflicted_paths.clear();
+ self.last_shown_paths.clear();
+ self.dismissed = false;
+ cx.notify();
+ } else if self.last_shown_paths != current_paths_set {
+ self.last_shown_paths = current_paths_set;
+ self.conflicted_paths = paths;
+ self.dismissed = false;
+ cx.notify();
+ }
+ }
+
+ fn resolve_with_agent(&mut self, window: &mut Window, cx: &mut Context<Self>) {
+ window.dispatch_action(
+ Box::new(ResolveConflictedFilesWithAgent {
+ conflicted_file_paths: self.conflicted_paths.clone(),
+ }),
+ cx,
+ );
+ self.dismissed = true;
+ cx.notify();
+ }
+
+ fn dismiss(&mut self, _: &ClickEvent, _window: &mut Window, cx: &mut Context<Self>) {
+ self.dismissed = true;
+ cx.notify();
+ }
+}
+
+impl Render for MergeConflictIndicator {
+ fn render(&mut self, _window: &mut Window, cx: &mut Context<Self>) -> impl IntoElement {
+ let agent_settings = AgentSettings::get_global(cx);
+ if !agent_settings.enabled(cx)
+ || !agent_settings.show_merge_conflict_indicator
+ || self.conflicted_paths.is_empty()
+ || self.dismissed
+ {
+ return Empty.into_any_element();
+ }
+
+ let file_count = self.conflicted_paths.len();
+
+ let message: SharedString = format!(
+ "Resolve Merge Conflict{} with Agent",
+ if file_count == 1 { "" } else { "s" }
+ )
+ .into();
+
+ let tooltip_label: SharedString = format!(
+ "Found {} {} across the codebase",
+ file_count,
+ if file_count == 1 {
+ "conflict"
+ } else {
+ "conflicts"
+ }
+ )
+ .into();
+
+ let border_color = cx.theme().colors().text_accent.opacity(0.2);
+
+ h_flex()
+ .h(rems_from_px(22.))
+ .rounded_sm()
+ .border_1()
+ .border_color(border_color)
+ .child(
+ ButtonLike::new("update-button")
+ .child(
+ h_flex()
+ .h_full()
+ .gap_1()
+ .child(
+ Icon::new(IconName::GitMergeConflict)
+ .size(IconSize::Small)
+ .color(Color::Muted),
+ )
+ .child(Label::new(message).size(LabelSize::Small)),
+ )
+ .tooltip(move |_, cx| {
+ Tooltip::with_meta(
+ tooltip_label.clone(),
+ None,
+ "Click to Resolve with Agent",
+ cx,
+ )
+ })
+ .on_click(cx.listener(|this, _, window, cx| {
+ this.resolve_with_agent(window, cx);
+ })),
+ )
+ .child(
+ div().border_l_1().border_color(border_color).child(
+ IconButton::new("dismiss-merge-conflicts", IconName::Close)
+ .icon_size(IconSize::XSmall)
+ .on_click(cx.listener(Self::dismiss)),
+ ),
+ )
+ .into_any_element()
+ }
+}
+
+impl StatusItemView for MergeConflictIndicator {
+ fn set_active_pane_item(
+ &mut self,
+ _: Option<&dyn ItemHandle>,
+ _window: &mut Window,
+ _: &mut Context<Self>,
+ ) {
+ }
+}
@@ -20,7 +20,6 @@ use editor::{
actions::ExpandAllDiffHunks,
};
use editor::{EditorStyle, RewrapOptions};
-use feature_flags::{FeatureFlagAppExt as _, GitGraphFeatureFlag};
use file_icons::FileIcons;
use futures::StreamExt as _;
use git::commit::ParsedCommitMessage;
@@ -49,7 +48,7 @@ use language_model::{
LanguageModelRequestMessage, Role,
};
use menu;
-use multi_buffer::ExcerptInfo;
+use multi_buffer::ExcerptBoundaryInfo;
use notifications::status_toast::{StatusToast, ToastIcon};
use panel::{PanelHeader, panel_button, panel_filled_button, panel_icon_button};
use project::{
@@ -781,7 +780,7 @@ impl GitPanel {
move |this, _git_store, event, window, cx| match event {
GitStoreEvent::RepositoryUpdated(
_,
- RepositoryEvent::StatusesChanged | RepositoryEvent::BranchChanged,
+ RepositoryEvent::StatusesChanged | RepositoryEvent::HeadChanged,
true,
)
| GitStoreEvent::RepositoryAdded
@@ -2156,6 +2155,7 @@ impl GitPanel {
CommitOptions {
amend: false,
signoff: self.signoff_enabled,
+ allow_empty: false,
},
window,
cx,
@@ -2196,6 +2196,7 @@ impl GitPanel {
CommitOptions {
amend: true,
signoff: self.signoff_enabled,
+ allow_empty: false,
},
window,
cx,
@@ -4455,7 +4456,11 @@ impl GitPanel {
git_panel
.update(cx, |git_panel, cx| {
git_panel.commit_changes(
- CommitOptions { amend, signoff },
+ CommitOptions {
+ amend,
+ signoff,
+ allow_empty: false,
+ },
window,
cx,
);
@@ -4529,7 +4534,6 @@ impl GitPanel {
let commit = branch.most_recent_commit.as_ref()?.clone();
let workspace = self.workspace.clone();
let this = cx.entity();
- let can_open_git_graph = cx.has_flag::<GitGraphFeatureFlag>();
Some(
h_flex()
@@ -4607,18 +4611,16 @@ impl GitPanel {
),
)
})
- .when(can_open_git_graph, |this| {
- this.child(
- panel_icon_button("git-graph-button", IconName::GitGraph)
- .icon_size(IconSize::Small)
- .tooltip(|_window, cx| {
- Tooltip::for_action("Open Git Graph", &Open, cx)
- })
- .on_click(|_, window, cx| {
- window.dispatch_action(Open.boxed_clone(), cx)
- }),
- )
- }),
+ .child(
+ panel_icon_button("git-graph-button", IconName::GitGraph)
+ .icon_size(IconSize::Small)
+ .tooltip(|_window, cx| {
+ Tooltip::for_action("Open Git Graph", &Open, cx)
+ })
+ .on_click(|_, window, cx| {
+ window.dispatch_action(Open.boxed_clone(), cx)
+ }),
+ ),
),
)
}
@@ -5754,11 +5756,12 @@ impl editor::Addon for GitPanelAddon {
fn render_buffer_header_controls(
&self,
- excerpt_info: &ExcerptInfo,
+ _excerpt_info: &ExcerptBoundaryInfo,
+ buffer: &language::BufferSnapshot,
window: &Window,
cx: &App,
) -> Option<AnyElement> {
- let file = excerpt_info.buffer.file()?;
+ let file = buffer.file()?;
let git_panel = self.workspace.upgrade()?.read(cx).panel::<GitPanel>(cx)?;
git_panel
@@ -47,6 +47,8 @@ pub mod stash_picker;
pub mod text_diff_view;
pub mod worktree_picker;
+pub use conflict_view::MergeConflictIndicator;
+
pub fn init(cx: &mut App) {
editor::set_blame_renderer(blame_ui::GitBlameRenderer, cx);
commit_view::init(cx);
@@ -62,7 +64,6 @@ pub fn init(cx: &mut App) {
git_panel::register(workspace);
repository_selector::register(workspace);
git_picker::register(workspace);
- conflict_view::register_conflict_notification(workspace, cx);
let project = workspace.project().read(cx);
if project.is_read_only(cx) {
@@ -501,9 +501,11 @@ impl ProjectDiff {
pub fn active_path(&self, cx: &App) -> Option<ProjectPath> {
let editor = self.editor.read(cx).focused_editor().read(cx);
+ let multibuffer = editor.buffer().read(cx);
let position = editor.selections.newest_anchor().head();
- let multi_buffer = editor.buffer().read(cx);
- let (_, buffer, _) = multi_buffer.excerpt_containing(position, cx)?;
+ let snapshot = multibuffer.snapshot(cx);
+ let (text_anchor, _) = snapshot.anchor_to_buffer_anchor(position)?;
+ let buffer = multibuffer.buffer(text_anchor.buffer_id)?;
let file = buffer.read(cx).file()?;
Some(ProjectPath {
@@ -516,9 +518,7 @@ impl ProjectDiff {
self.editor.update(cx, |editor, cx| {
editor.rhs_editor().update(cx, |editor, cx| {
editor.change_selections(Default::default(), window, cx, |s| {
- s.select_ranges(vec![
- multi_buffer::Anchor::min()..multi_buffer::Anchor::min(),
- ]);
+ s.select_ranges(vec![multi_buffer::Anchor::Min..multi_buffer::Anchor::Min]);
});
});
});
@@ -569,17 +569,17 @@ impl ProjectDiff {
.collect::<Vec<_>>();
if !ranges.iter().any(|range| range.start != range.end) {
selection = false;
- if let Some((excerpt_id, _, range)) = self
- .editor
- .read(cx)
- .rhs_editor()
- .read(cx)
- .active_excerpt(cx)
+ let anchor = editor.selections.newest_anchor().head();
+ if let Some((_, excerpt_range)) = snapshot.excerpt_containing(anchor..anchor)
+ && let Some(range) = snapshot
+ .anchor_in_buffer(excerpt_range.context.start)
+ .zip(snapshot.anchor_in_buffer(excerpt_range.context.end))
+ .map(|(start, end)| start..end)
{
- ranges = vec![multi_buffer::Anchor::range_in_buffer(excerpt_id, range)];
+ ranges = vec![range];
} else {
ranges = Vec::default();
- }
+ };
}
let mut has_staged_hunks = false;
let mut has_unstaged_hunks = false;
@@ -715,7 +715,7 @@ impl ProjectDiff {
let (was_empty, is_excerpt_newly_added) = self.editor.update(cx, |editor, cx| {
let was_empty = editor.rhs_editor().read(cx).buffer().read(cx).is_empty();
- let (_, is_newly_added) = editor.set_excerpts_for_path(
+ let is_newly_added = editor.update_excerpts_for_path(
path_key.clone(),
buffer,
excerpt_ranges,
@@ -735,7 +735,7 @@ impl ProjectDiff {
cx,
|selections| {
selections.select_ranges([
- multi_buffer::Anchor::min()..multi_buffer::Anchor::min()
+ multi_buffer::Anchor::Min..multi_buffer::Anchor::Min
])
},
);
@@ -785,8 +785,9 @@ impl ProjectDiff {
let mut previous_paths = this
.multibuffer
.read(cx)
- .paths()
- .cloned()
+ .snapshot(cx)
+ .buffers_with_paths()
+ .map(|(_, path_key)| path_key.clone())
.collect::<HashSet<_>>();
if let Some(repo) = repo {
@@ -877,10 +878,23 @@ impl ProjectDiff {
#[cfg(any(test, feature = "test-support"))]
pub fn excerpt_paths(&self, cx: &App) -> Vec<std::sync::Arc<util::rel_path::RelPath>> {
- self.multibuffer
+ let snapshot = self
+ .editor()
+ .read(cx)
+ .rhs_editor()
+ .read(cx)
+ .buffer()
.read(cx)
- .paths()
- .map(|key| key.path.clone())
+ .snapshot(cx);
+ snapshot
+ .excerpts()
+ .map(|excerpt| {
+ snapshot
+ .path_for_buffer(excerpt.context.start.buffer_id)
+ .unwrap()
+ .path
+ .clone()
+ })
.collect()
}
}
@@ -1937,7 +1951,7 @@ mod tests {
let snapshot = buffer_editor.snapshot(window, cx);
let snapshot = &snapshot.buffer_snapshot();
let prev_buffer_hunks = buffer_editor
- .diff_hunks_in_ranges(&[editor::Anchor::min()..editor::Anchor::max()], snapshot)
+ .diff_hunks_in_ranges(&[editor::Anchor::Min..editor::Anchor::Max], snapshot)
.collect::<Vec<_>>();
buffer_editor.git_restore(&Default::default(), window, cx);
prev_buffer_hunks
@@ -1950,7 +1964,7 @@ mod tests {
let snapshot = buffer_editor.snapshot(window, cx);
let snapshot = &snapshot.buffer_snapshot();
buffer_editor
- .diff_hunks_in_ranges(&[editor::Anchor::min()..editor::Anchor::max()], snapshot)
+ .diff_hunks_in_ranges(&[editor::Anchor::Min..editor::Anchor::Max], snapshot)
.collect::<Vec<_>>()
});
assert_eq!(new_buffer_hunks.as_slice(), &[]);
@@ -2209,9 +2223,14 @@ mod tests {
cx.update(|window, cx| {
let editor = diff.read(cx).editor.read(cx).rhs_editor().clone();
- let excerpt_ids = editor.read(cx).buffer().read(cx).excerpt_ids();
- assert_eq!(excerpt_ids.len(), 1);
- let excerpt_id = excerpt_ids[0];
+ let excerpts = editor
+ .read(cx)
+ .buffer()
+ .read(cx)
+ .snapshot(cx)
+ .excerpts()
+ .collect::<Vec<_>>();
+ assert_eq!(excerpts.len(), 1);
let buffer = editor
.read(cx)
.buffer()
@@ -2239,7 +2258,6 @@ mod tests {
resolve_conflict(
editor.downgrade(),
- excerpt_id,
snapshot.conflicts[0].clone(),
vec![ours_range],
window,
@@ -11,7 +11,7 @@ use gpui::{
AnyElement, App, AppContext as _, AsyncApp, Context, Entity, EventEmitter, FocusHandle,
Focusable, IntoElement, Render, Task, Window,
};
-use language::{self, Buffer, Point};
+use language::{self, Buffer, OffsetRangeExt, Point};
use project::Project;
use settings::Settings;
use std::{
@@ -52,36 +52,26 @@ impl TextDiffView {
let selection_data = source_editor.update(cx, |editor, cx| {
let multibuffer = editor.buffer();
- let selections = editor.selections.all::<Point>(&editor.display_snapshot(cx));
- let first_selection = selections.first()?;
-
- let (source_buffer, buffer_start, start_excerpt) = multibuffer
- .read(cx)
- .point_to_buffer_point(first_selection.start, cx)?;
- let buffer_end = multibuffer
- .read(cx)
- .point_to_buffer_point(first_selection.end, cx)
- .and_then(|(buf, pt, end_excerpt)| {
- (buf.read(cx).remote_id() == source_buffer.read(cx).remote_id()
- && end_excerpt == start_excerpt)
- .then_some(pt)
- })
- .unwrap_or(buffer_start);
+ let multibuffer_snapshot = multibuffer.read(cx).snapshot(cx);
+ let first_selection = editor.selections.newest_anchor();
- let buffer_snapshot = source_buffer.read(cx);
- let max_point = buffer_snapshot.max_point();
+ let (source_buffer, buffer_range) = multibuffer_snapshot
+ .anchor_range_to_buffer_anchor_range(first_selection.range())?;
+ let max_point = source_buffer.max_point();
+ let buffer_range = buffer_range.to_point(source_buffer);
+ let source_buffer = multibuffer.read(cx).buffer(source_buffer.remote_id())?;
- if first_selection.is_empty() {
+ if buffer_range.is_empty() {
let full_range = Point::new(0, 0)..max_point;
return Some((source_buffer, full_range));
}
- let expanded_start = Point::new(buffer_start.row, 0);
- let expanded_end = if buffer_end.column > 0 {
- let next_row = buffer_end.row + 1;
+ let expanded_start = Point::new(buffer_range.start.row, 0);
+ let expanded_end = if buffer_range.end.column > 0 {
+ let next_row = buffer_range.end.row + 1;
cmp::min(max_point, Point::new(next_row, 0))
} else {
- buffer_end
+ buffer_range.end
};
Some((source_buffer, expanded_start..expanded_end))
});
@@ -364,7 +364,7 @@ impl WorktreeListDelegate {
workspace
.update_in(cx, |workspace, window, cx| {
workspace.open_workspace_for_paths(
- OpenMode::Replace,
+ OpenMode::Activate,
vec![new_worktree_path],
window,
cx,
@@ -418,7 +418,7 @@ impl WorktreeListDelegate {
return;
};
let open_mode = if replace_current_window {
- OpenMode::Replace
+ OpenMode::Activate
} else {
OpenMode::NewWindow
};
@@ -754,7 +754,7 @@ impl PickerDelegate for WorktreeListDelegate {
if entry.is_new {
self.create_worktree(&entry.worktree.display_name(), secondary, None, window, cx);
} else {
- self.open_worktree(&entry.worktree.path, secondary, window, cx);
+ self.open_worktree(&entry.worktree.path, !secondary, window, cx);
}
cx.emit(DismissEvent);
@@ -42,23 +42,22 @@ impl UserCaretPosition {
snapshot: &MultiBufferSnapshot,
) -> Self {
let selection_end = selection.head();
- let (line, character) = if let Some((buffer_snapshot, point, _)) =
- snapshot.point_to_buffer_point(selection_end)
- {
- let line_start = Point::new(point.row, 0);
+ let (line, character) =
+ if let Some((buffer_snapshot, point)) = snapshot.point_to_buffer_point(selection_end) {
+ let line_start = Point::new(point.row, 0);
- let chars_to_last_position = buffer_snapshot
- .text_summary_for_range::<text::TextSummary, _>(line_start..point)
- .chars as u32;
- (line_start.row, chars_to_last_position)
- } else {
- let line_start = Point::new(selection_end.row, 0);
+ let chars_to_last_position = buffer_snapshot
+ .text_summary_for_range::<text::TextSummary, _>(line_start..point)
+ .chars as u32;
+ (line_start.row, chars_to_last_position)
+ } else {
+ let line_start = Point::new(selection_end.row, 0);
- let chars_to_last_position = snapshot
- .text_summary_for_range::<MBTextSummary, _>(line_start..selection_end)
- .chars as u32;
- (selection_end.row, chars_to_last_position)
- };
+ let chars_to_last_position = snapshot
+ .text_summary_for_range::<MBTextSummary, _>(line_start..selection_end)
+ .chars as u32;
+ (selection_end.row, chars_to_last_position)
+ };
Self {
line: NonZeroU32::new(line + 1).expect("added 1"),
@@ -232,7 +231,7 @@ impl Render for CursorPosition {
if let Some(editor) = workspace
.active_item(cx)
.and_then(|item| item.act_as::<Editor>(cx))
- && let Some((_, buffer, _)) = editor.read(cx).active_excerpt(cx)
+ && let Some(buffer) = editor.read(cx).active_buffer(cx)
{
workspace.toggle_modal(window, cx, |window, cx| {
crate::GoToLine::new(editor, buffer, window, cx)
@@ -63,7 +63,7 @@ impl GoToLine {
return;
};
let editor = editor_handle.read(cx);
- let Some((_, buffer, _)) = editor.active_excerpt(cx) else {
+ let Some(buffer) = editor.active_buffer(cx) else {
return;
};
workspace.update(cx, |workspace, cx| {
@@ -93,11 +93,9 @@ impl GoToLine {
let last_line = editor
.buffer()
.read(cx)
- .excerpts_for_buffer(snapshot.remote_id(), cx)
- .into_iter()
- .map(move |(_, _, range)| {
- text::ToPoint::to_point(&range.context.end, &snapshot).row
- })
+ .snapshot(cx)
+ .excerpts_for_buffer(snapshot.remote_id())
+ .map(move |range| text::ToPoint::to_point(&range.context.end, &snapshot).row)
.max()
.unwrap_or(0);
@@ -230,7 +228,7 @@ impl GoToLine {
let character = query_char.unwrap_or(0).saturating_sub(1);
let target_multi_buffer_row = MultiBufferRow(row);
- let (buffer_snapshot, target_in_buffer, _) = snapshot.point_to_buffer_point(Point::new(
+ let (buffer_snapshot, target_in_buffer) = snapshot.point_to_buffer_point(Point::new(
target_multi_buffer_row.min(snapshot.max_row()).0,
0,
))?;
@@ -72,7 +72,7 @@ struct StateInner {
scrollbar_drag_start_height: Option<Pixels>,
measuring_behavior: ListMeasuringBehavior,
pending_scroll: Option<PendingScrollFraction>,
- follow_tail: bool,
+ follow_state: FollowState,
}
/// Keeps track of a fractional scroll position within an item for restoration
@@ -84,6 +84,49 @@ struct PendingScrollFraction {
fraction: f32,
}
+/// Controls whether the list automatically follows new content at the end.
+#[derive(Clone, Copy, Debug, Default, Eq, PartialEq)]
+pub enum FollowMode {
+ /// Normal scrolling — no automatic following.
+ #[default]
+ Normal,
+ /// The list should auto-scroll along with the tail, when scrolled to bottom.
+ Tail,
+}
+
+#[derive(Clone, Copy, Debug, Default, Eq, PartialEq)]
+enum FollowState {
+ #[default]
+ Normal,
+ Tail {
+ is_following: bool,
+ },
+}
+
+impl FollowState {
+ fn is_following(&self) -> bool {
+ matches!(self, FollowState::Tail { is_following: true })
+ }
+
+ fn has_stopped_following(&self) -> bool {
+ matches!(
+ self,
+ FollowState::Tail {
+ is_following: false
+ }
+ )
+ }
+
+ fn start_following(&mut self) {
+ if let FollowState::Tail {
+ is_following: false,
+ } = self
+ {
+ *self = FollowState::Tail { is_following: true };
+ }
+ }
+}
+
/// Whether the list is scrolling from top to bottom or bottom to top.
#[derive(Clone, Copy, Debug, Eq, PartialEq)]
pub enum ListAlignment {
@@ -169,6 +212,7 @@ pub struct ListPrepaintState {
#[derive(Clone)]
enum ListItem {
Unmeasured {
+ size_hint: Option<Size<Pixels>>,
focus_handle: Option<FocusHandle>,
},
Measured {
@@ -186,9 +230,16 @@ impl ListItem {
}
}
+ fn size_hint(&self) -> Option<Size<Pixels>> {
+ match self {
+ ListItem::Measured { size, .. } => Some(*size),
+ ListItem::Unmeasured { size_hint, .. } => *size_hint,
+ }
+ }
+
fn focus_handle(&self) -> Option<FocusHandle> {
match self {
- ListItem::Unmeasured { focus_handle } | ListItem::Measured { focus_handle, .. } => {
+ ListItem::Unmeasured { focus_handle, .. } | ListItem::Measured { focus_handle, .. } => {
focus_handle.clone()
}
}
@@ -196,7 +247,7 @@ impl ListItem {
fn contains_focused(&self, window: &Window, cx: &App) -> bool {
match self {
- ListItem::Unmeasured { focus_handle } | ListItem::Measured { focus_handle, .. } => {
+ ListItem::Unmeasured { focus_handle, .. } | ListItem::Measured { focus_handle, .. } => {
focus_handle
.as_ref()
.is_some_and(|handle| handle.contains_focused(window, cx))
@@ -240,7 +291,7 @@ impl ListState {
scrollbar_drag_start_height: None,
measuring_behavior: ListMeasuringBehavior::default(),
pending_scroll: None,
- follow_tail: false,
+ follow_state: FollowState::default(),
})));
this.splice(0..0, item_count);
this
@@ -275,37 +326,63 @@ impl ListState {
/// Use this when item heights may have changed (e.g., font size changes)
/// but the number and identity of items remains the same.
pub fn remeasure(&self) {
- let state = &mut *self.0.borrow_mut();
+ let count = self.item_count();
+ self.remeasure_items(0..count);
+ }
- let new_items = state.items.iter().map(|item| ListItem::Unmeasured {
- focus_handle: item.focus_handle(),
- });
+ /// Mark items in `range` as needing remeasurement while preserving
+ /// the current scroll position. Unlike [`Self::splice`], this does
+ /// not change the number of items or blow away `logical_scroll_top`.
+ ///
+ /// Use this when an item's content has changed and its rendered
+ /// height may be different (e.g., streaming text, tool results
+ /// loading), but the item itself still exists at the same index.
+ pub fn remeasure_items(&self, range: Range<usize>) {
+ let state = &mut *self.0.borrow_mut();
- // If there's a `logical_scroll_top`, we need to keep track of it as a
- // `PendingScrollFraction`, so we can later preserve that scroll
- // position proportionally to the item, in case the item's height
- // changes.
+ // If the scroll-top item falls within the remeasured range,
+ // store a fractional offset so the layout can restore the
+ // proportional scroll position after the item is re-rendered
+ // at its new height.
if let Some(scroll_top) = state.logical_scroll_top {
- let mut cursor = state.items.cursor::<Count>(());
- cursor.seek(&Count(scroll_top.item_ix), Bias::Right);
+ if range.contains(&scroll_top.item_ix) {
+ let mut cursor = state.items.cursor::<Count>(());
+ cursor.seek(&Count(scroll_top.item_ix), Bias::Right);
- if let Some(item) = cursor.item() {
- if let Some(size) = item.size() {
- let fraction = if size.height.0 > 0.0 {
- (scroll_top.offset_in_item.0 / size.height.0).clamp(0.0, 1.0)
- } else {
- 0.0
- };
-
- state.pending_scroll = Some(PendingScrollFraction {
- item_ix: scroll_top.item_ix,
- fraction,
- });
+ if let Some(item) = cursor.item() {
+ if let Some(size) = item.size() {
+ let fraction = if size.height.0 > 0.0 {
+ (scroll_top.offset_in_item.0 / size.height.0).clamp(0.0, 1.0)
+ } else {
+ 0.0
+ };
+
+ state.pending_scroll = Some(PendingScrollFraction {
+ item_ix: scroll_top.item_ix,
+ fraction,
+ });
+ }
}
}
}
- state.items = SumTree::from_iter(new_items, ());
+ // Rebuild the tree, replacing items in the range with
+ // Unmeasured copies that keep their focus handles.
+ let new_items = {
+ let mut cursor = state.items.cursor::<Count>(());
+ let mut new_items = cursor.slice(&Count(range.start), Bias::Right);
+ let invalidated = cursor.slice(&Count(range.end), Bias::Right);
+ new_items.extend(
+ invalidated.iter().map(|item| ListItem::Unmeasured {
+ size_hint: item.size_hint(),
+ focus_handle: item.focus_handle(),
+ }),
+ (),
+ );
+ new_items.append(cursor.suffix(), ());
+ new_items
+ };
+ state.items = new_items;
state.measuring_behavior.reset();
}
@@ -339,7 +416,10 @@ impl ListState {
new_items.extend(
focus_handles.into_iter().map(|focus_handle| {
spliced_count += 1;
- ListItem::Unmeasured { focus_handle }
+ ListItem::Unmeasured {
+ size_hint: None,
+ focus_handle,
+ }
}),
(),
);
@@ -382,6 +462,13 @@ impl ListState {
let current_offset = self.logical_scroll_top();
let state = &mut *self.0.borrow_mut();
+
+ if distance < px(0.) {
+ if let FollowState::Tail { is_following } = &mut state.follow_state {
+ *is_following = false;
+ }
+ }
+
let mut cursor = state.items.cursor::<ListItemSummary>(());
cursor.seek(&Count(current_offset.item_ix), Bias::Right);
@@ -414,17 +501,37 @@ impl ListState {
});
}
- /// Set whether the list should automatically follow the tail (auto-scroll to the end).
- pub fn set_follow_tail(&self, follow: bool) {
- self.0.borrow_mut().follow_tail = follow;
- if follow {
- self.scroll_to_end();
+ /// Set the follow mode for the list. In `Tail` mode, the list
+ /// will auto-scroll to the end and re-engage after the user
+ /// scrolls back to the bottom. In `Normal` mode, no automatic
+ /// following occurs.
+ pub fn set_follow_mode(&self, mode: FollowMode) {
+ let state = &mut *self.0.borrow_mut();
+
+ match mode {
+ FollowMode::Normal => {
+ state.follow_state = FollowState::Normal;
+ }
+ FollowMode::Tail => {
+ state.follow_state = FollowState::Tail { is_following: true };
+ if matches!(mode, FollowMode::Tail) {
+ let item_count = state.items.summary().count;
+ state.logical_scroll_top = Some(ListOffset {
+ item_ix: item_count,
+ offset_in_item: px(0.),
+ });
+ }
+ }
}
}
- /// Returns whether the list is currently in follow-tail mode (auto-scrolling to the end).
+ /// Returns whether the list is currently actively following the
+ /// tail (snapping to the end on each layout).
pub fn is_following_tail(&self) -> bool {
- self.0.borrow().follow_tail
+ matches!(
+ self.0.borrow().follow_state,
+ FollowState::Tail { is_following: true }
+ )
}
/// Scroll the list to the given offset
@@ -436,6 +543,12 @@ impl ListState {
scroll_top.offset_in_item = px(0.);
}
+ if scroll_top.item_ix < item_count {
+ if let FollowState::Tail { is_following } = &mut state.follow_state {
+ *is_following = false;
+ }
+ }
+
state.logical_scroll_top = Some(scroll_top);
}
@@ -592,6 +705,7 @@ impl StateInner {
if self.reset {
return;
}
+
let padding = self.last_padding.unwrap_or_default();
let scroll_max =
(self.items.summary().height + padding.top + padding.bottom - height).max(px(0.));
@@ -613,8 +727,10 @@ impl StateInner {
});
}
- if self.follow_tail && delta.y > px(0.) {
- self.follow_tail = false;
+ if let FollowState::Tail { is_following } = &mut self.follow_state {
+ if delta.y > px(0.) {
+ *is_following = false;
+ }
}
if let Some(handler) = self.scroll_handler.as_mut() {
@@ -624,7 +740,10 @@ impl StateInner {
visible_range,
count: self.items.summary().count,
is_scrolled: self.logical_scroll_top.is_some(),
- is_following_tail: self.follow_tail,
+ is_following_tail: matches!(
+ self.follow_state,
+ FollowState::Tail { is_following: true }
+ ),
},
window,
cx,
@@ -715,7 +834,7 @@ impl StateInner {
let mut max_item_width = px(0.);
let mut scroll_top = self.logical_scroll_top();
- if self.follow_tail {
+ if self.follow_state.is_following() {
scroll_top = ListOffset {
item_ix: self.items.summary().count,
offset_in_item: px(0.),
@@ -868,6 +987,18 @@ impl StateInner {
new_items.append(cursor.suffix(), ());
self.items = new_items;
+ // If follow_tail mode is on but the user scrolled away
+ // (is_following is false), check whether the current scroll
+ // position has returned to the bottom.
+ if self.follow_state.has_stopped_following() {
+ let padding = self.last_padding.unwrap_or_default();
+ let total_height = self.items.summary().height + padding.top + padding.bottom;
+ let scroll_offset = self.scroll_top(&scroll_top);
+ if scroll_offset + available_height >= total_height - px(1.0) {
+ self.follow_state.start_following();
+ }
+ }
+
// If none of the visible items are focused, check if an off-screen item is focused
// and include it to be rendered after the visible items so keyboard interaction continues
// to work for it.
@@ -1004,7 +1135,7 @@ impl StateInner {
content_height - self.scrollbar_drag_start_height.unwrap_or(content_height);
let new_scroll_top = (point.y - drag_offset).abs().max(px(0.)).min(scroll_max);
- self.follow_tail = false;
+ self.follow_state = FollowState::Normal;
if self.alignment == ListAlignment::Bottom && new_scroll_top == scroll_max {
self.logical_scroll_top = None;
@@ -1152,6 +1283,7 @@ impl Element for List {
{
let new_items = SumTree::from_iter(
state.items.iter().map(|item| ListItem::Unmeasured {
+ size_hint: None,
focus_handle: item.focus_handle(),
}),
(),
@@ -1238,11 +1370,18 @@ impl sum_tree::Item for ListItem {
fn summary(&self, _: ()) -> Self::Summary {
match self {
- ListItem::Unmeasured { focus_handle } => ListItemSummary {
+ ListItem::Unmeasured {
+ size_hint,
+ focus_handle,
+ } => ListItemSummary {
count: 1,
rendered_count: 0,
unrendered_count: 1,
- height: px(0.),
+ height: if let Some(size) = size_hint {
+ size.height
+ } else {
+ px(0.)
+ },
has_focus_handles: focus_handle.is_some(),
},
ListItem::Measured {
@@ -1312,8 +1451,8 @@ mod test {
use std::rc::Rc;
use crate::{
- self as gpui, AppContext, Context, Element, IntoElement, ListState, Render, Styled,
- TestAppContext, Window, div, list, point, px, size,
+ self as gpui, AppContext, Context, Element, FollowMode, IntoElement, ListState, Render,
+ Styled, TestAppContext, Window, div, list, point, px, size,
};
#[gpui::test]
@@ -1538,7 +1677,7 @@ mod test {
})
});
- state.set_follow_tail(true);
+ state.set_follow_mode(FollowMode::Tail);
// First paint — items are 50px, total 500px, viewport 200px.
// Follow-tail should anchor to the end.
@@ -1592,7 +1731,7 @@ mod test {
}
}
- state.set_follow_tail(true);
+ state.set_follow_mode(FollowMode::Tail);
// Paint with follow-tail — scroll anchored to the bottom.
cx.draw(point(px(0.), px(0.)), size(px(100.), px(200.)), |_, cx| {
@@ -1634,7 +1773,7 @@ mod test {
let view = cx.update(|_, cx| cx.new(|_| TestView(state.clone())));
- state.set_follow_tail(true);
+ state.set_follow_mode(FollowMode::Tail);
// Paint with follow-tail — scroll anchored to the bottom.
cx.draw(point(px(0.), px(0.)), size(px(100.), px(200.)), |_, _| {
@@ -1702,7 +1841,7 @@ mod test {
// Enable follow-tail — this should immediately snap the scroll anchor
// to the end, like the user just sent a prompt.
- state.set_follow_tail(true);
+ state.set_follow_mode(FollowMode::Tail);
cx.draw(point(px(0.), px(0.)), size(px(100.), px(200.)), |_, _| {
view.into_any_element()
@@ -1757,4 +1896,201 @@ mod test {
-scroll_offset.y, max_offset.y,
);
}
+
+ /// When the user scrolls away from the bottom during follow_tail,
+ /// follow_tail suspends. If they scroll back to the bottom, the
+ /// next paint should re-engage follow_tail using fresh measurements.
+ #[gpui::test]
+ fn test_follow_tail_reengages_when_scrolled_back_to_bottom(cx: &mut TestAppContext) {
+ let cx = cx.add_empty_window();
+
+ // 10 items × 50px = 500px total, 200px viewport.
+ let state = ListState::new(10, crate::ListAlignment::Top, px(0.));
+
+ struct TestView(ListState);
+ impl Render for TestView {
+ fn render(&mut self, _: &mut Window, _: &mut Context<Self>) -> impl IntoElement {
+ list(self.0.clone(), |_, _, _| {
+ div().h(px(50.)).w_full().into_any()
+ })
+ .w_full()
+ .h_full()
+ }
+ }
+
+ let view = cx.update(|_, cx| cx.new(|_| TestView(state.clone())));
+
+ state.set_follow_mode(FollowMode::Tail);
+
+ cx.draw(point(px(0.), px(0.)), size(px(100.), px(200.)), |_, _| {
+ view.clone().into_any_element()
+ });
+ assert!(state.is_following_tail());
+
+ // Scroll up — follow_tail should suspend (not fully disengage).
+ cx.simulate_event(ScrollWheelEvent {
+ position: point(px(50.), px(100.)),
+ delta: ScrollDelta::Pixels(point(px(0.), px(50.))),
+ ..Default::default()
+ });
+ assert!(!state.is_following_tail());
+
+ // Scroll back down to the bottom.
+ cx.simulate_event(ScrollWheelEvent {
+ position: point(px(50.), px(100.)),
+ delta: ScrollDelta::Pixels(point(px(0.), px(-10000.))),
+ ..Default::default()
+ });
+
+ // After a paint, follow_tail should re-engage because the
+ // layout confirmed we're at the true bottom.
+ cx.draw(point(px(0.), px(0.)), size(px(100.), px(200.)), |_, _| {
+ view.clone().into_any_element()
+ });
+ assert!(
+ state.is_following_tail(),
+ "follow_tail should re-engage after scrolling back to the bottom"
+ );
+ }
+
+ /// When an item is spliced to unmeasured (0px) while follow_tail
+    /// is suspended, the re-engagement check should still work correctly.
+ #[gpui::test]
+ fn test_follow_tail_reengagement_not_fooled_by_unmeasured_items(cx: &mut TestAppContext) {
+ let cx = cx.add_empty_window();
+
+ // 20 items × 50px = 1000px total, 200px viewport, 1000px
+ // overdraw so all items get measured during the follow_tail
+ // paint (matching realistic production settings).
+ let state = ListState::new(20, crate::ListAlignment::Top, px(1000.));
+
+ struct TestView(ListState);
+ impl Render for TestView {
+ fn render(&mut self, _: &mut Window, _: &mut Context<Self>) -> impl IntoElement {
+ list(self.0.clone(), |_, _, _| {
+ div().h(px(50.)).w_full().into_any()
+ })
+ .w_full()
+ .h_full()
+ }
+ }
+
+ let view = cx.update(|_, cx| cx.new(|_| TestView(state.clone())));
+
+ state.set_follow_mode(FollowMode::Tail);
+
+ cx.draw(point(px(0.), px(0.)), size(px(100.), px(200.)), |_, _| {
+ view.clone().into_any_element()
+ });
+ assert!(state.is_following_tail());
+
+ // Scroll up a meaningful amount — suspends follow_tail.
+ // 20 items × 50px = 1000px. viewport 200px. scroll_max = 800px.
+ // Scrolling up 200px puts us at 600px, clearly not at bottom.
+ cx.simulate_event(ScrollWheelEvent {
+ position: point(px(50.), px(100.)),
+ delta: ScrollDelta::Pixels(point(px(0.), px(200.))),
+ ..Default::default()
+ });
+ assert!(!state.is_following_tail());
+
+ // Invalidate the last item (simulates EntryUpdated calling
+ // remeasure_items). This makes items.summary().height
+ // temporarily wrong (0px for the invalidated item).
+ state.remeasure_items(19..20);
+
+ // Paint — layout re-measures the invalidated item with its true
+ // height. The re-engagement check uses these fresh measurements.
+ // Since we scrolled 200px up from the 800px max, we're at
+ // ~600px — NOT at the bottom, so follow_tail should NOT
+ // re-engage.
+ cx.draw(point(px(0.), px(0.)), size(px(100.), px(200.)), |_, _| {
+ view.clone().into_any_element()
+ });
+ assert!(
+ !state.is_following_tail(),
+ "follow_tail should not falsely re-engage due to an unmeasured item \
+ reducing items.summary().height"
+ );
+ }
+
+    /// Calling `set_follow_mode(FollowMode::Normal)` or dragging the scrollbar should
+ /// fully disengage follow_tail — clearing any suspended state so
+ /// follow_tail won’t auto-re-engage.
+ #[gpui::test]
+ fn test_follow_tail_suspended_state_cleared_by_explicit_actions(cx: &mut TestAppContext) {
+ let cx = cx.add_empty_window();
+
+ // 10 items × 50px = 500px total, 200px viewport.
+ let state = ListState::new(10, crate::ListAlignment::Top, px(0.)).measure_all();
+
+ struct TestView(ListState);
+ impl Render for TestView {
+ fn render(&mut self, _: &mut Window, _: &mut Context<Self>) -> impl IntoElement {
+ list(self.0.clone(), |_, _, _| {
+ div().h(px(50.)).w_full().into_any()
+ })
+ .w_full()
+ .h_full()
+ }
+ }
+
+ let view = cx.update(|_, cx| cx.new(|_| TestView(state.clone())));
+
+ state.set_follow_mode(FollowMode::Tail);
+        // --- Part 1: set_follow_mode(FollowMode::Normal) clears suspended state ---
+
+ cx.draw(point(px(0.), px(0.)), size(px(100.), px(200.)), |_, _| {
+ view.clone().into_any_element()
+ });
+
+ // Scroll up — suspends follow_tail.
+ cx.simulate_event(ScrollWheelEvent {
+ position: point(px(50.), px(100.)),
+ delta: ScrollDelta::Pixels(point(px(0.), px(50.))),
+ ..Default::default()
+ });
+ assert!(!state.is_following_tail());
+
+ // Scroll back to the bottom — should re-engage follow_tail.
+ cx.simulate_event(ScrollWheelEvent {
+ position: point(px(50.), px(100.)),
+ delta: ScrollDelta::Pixels(point(px(0.), px(-10000.))),
+ ..Default::default()
+ });
+
+ cx.draw(point(px(0.), px(0.)), size(px(100.), px(200.)), |_, _| {
+ view.clone().into_any_element()
+ });
+ assert!(
+ state.is_following_tail(),
+ "follow_tail should re-engage after scrolling back to the bottom"
+ );
+
+ // --- Part 2: scrollbar drag clears suspended state ---
+
+ cx.draw(point(px(0.), px(0.)), size(px(100.), px(200.)), |_, _| {
+ view.clone().into_any_element()
+ });
+
+ // Drag the scrollbar to the middle — should clear suspended state.
+ state.set_offset_from_scrollbar(point(px(0.), px(150.)));
+
+ // Scroll to the bottom.
+ cx.simulate_event(ScrollWheelEvent {
+ position: point(px(50.), px(100.)),
+ delta: ScrollDelta::Pixels(point(px(0.), px(-10000.))),
+ ..Default::default()
+ });
+
+ // Paint — should NOT re-engage because the scrollbar drag
+ // cleared the suspended state.
+ cx.draw(point(px(0.), px(0.)), size(px(100.), px(200.)), |_, _| {
+ view.clone().into_any_element()
+ });
+ assert!(
+ !state.is_following_tail(),
+ "follow_tail should not re-engage after scrollbar drag cleared the suspended state"
+ );
+ }
}
@@ -5496,6 +5496,8 @@ pub enum ElementId {
CodeLocation(core::panic::Location<'static>),
/// A labeled child of an element.
NamedChild(Arc<ElementId>, SharedString),
+ /// A byte array ID (used for text-anchors)
+ OpaqueId([u8; 20]),
}
impl ElementId {
@@ -5517,6 +5519,7 @@ impl Display for ElementId {
ElementId::Path(path) => write!(f, "{}", path.display())?,
ElementId::CodeLocation(location) => write!(f, "{}", location)?,
ElementId::NamedChild(id, name) => write!(f, "{}-{}", id, name)?,
+ ElementId::OpaqueId(opaque_id) => write!(f, "{:x?}", opaque_id)?,
}
Ok(())
@@ -5631,6 +5634,12 @@ impl From<&'static core::panic::Location<'static>> for ElementId {
}
}
+impl From<[u8; 20]> for ElementId {
+ fn from(opaque_id: [u8; 20]) -> Self {
+ ElementId::OpaqueId(opaque_id)
+ }
+}
+
/// A rectangle to be rendered in the window at the given position and size.
/// Passed as an argument [`Window::paint_quad`].
#[derive(Clone)]
@@ -1701,12 +1701,7 @@ impl rwh::HasWindowHandle for MacWindow {
impl rwh::HasDisplayHandle for MacWindow {
fn display_handle(&self) -> Result<rwh::DisplayHandle<'_>, rwh::HandleError> {
- // SAFETY: This is a no-op on macOS
- unsafe {
- Ok(rwh::DisplayHandle::borrow_raw(
- rwh::AppKitDisplayHandle::new().into(),
- ))
- }
+ Ok(rwh::DisplayHandle::appkit())
}
}
@@ -115,6 +115,8 @@ impl PlatformAtlas for WgpuAtlas {
if let Some(mut texture) = texture_slot.take() {
texture.decrement_ref_count();
if texture.is_unreferenced() {
+ lock.pending_uploads
+ .retain(|upload| upload.id != texture.id);
lock.storage[id.kind]
.free_list
.push(texture.id.index as usize);
@@ -228,7 +230,9 @@ impl WgpuAtlasState {
fn flush_uploads(&mut self) {
for upload in self.pending_uploads.drain(..) {
- let texture = &self.storage[upload.id];
+ let Some(texture) = self.storage.get(upload.id) else {
+ continue;
+ };
let bytes_per_pixel = texture.bytes_per_pixel();
self.queue.write_texture(
@@ -286,6 +290,15 @@ impl ops::IndexMut<AtlasTextureKind> for WgpuAtlasStorage {
}
}
+impl WgpuAtlasStorage {
+ fn get(&self, id: AtlasTextureId) -> Option<&WgpuAtlasTexture> {
+ self[id.kind]
+ .textures
+ .get(id.index as usize)
+ .and_then(|t| t.as_ref())
+ }
+}
+
impl ops::Index<AtlasTextureId> for WgpuAtlasStorage {
type Output = WgpuAtlasTexture;
fn index(&self, id: AtlasTextureId) -> &Self::Output {
@@ -341,3 +354,70 @@ impl WgpuAtlasTexture {
self.live_atlas_keys == 0
}
}
+
+#[cfg(all(test, not(target_family = "wasm")))]
+mod tests {
+ use super::*;
+ use gpui::{ImageId, RenderImageParams};
+ use pollster::block_on;
+ use std::sync::Arc;
+
+ fn test_device_and_queue() -> anyhow::Result<(Arc<wgpu::Device>, Arc<wgpu::Queue>)> {
+ block_on(async {
+ let instance = wgpu::Instance::new(wgpu::InstanceDescriptor {
+ backends: wgpu::Backends::all(),
+ flags: wgpu::InstanceFlags::default(),
+ backend_options: wgpu::BackendOptions::default(),
+ memory_budget_thresholds: wgpu::MemoryBudgetThresholds::default(),
+ display: None,
+ });
+ let adapter = instance
+ .request_adapter(&wgpu::RequestAdapterOptions {
+ power_preference: wgpu::PowerPreference::LowPower,
+ compatible_surface: None,
+ force_fallback_adapter: false,
+ })
+ .await
+ .map_err(|error| anyhow::anyhow!("failed to request adapter: {error}"))?;
+ let (device, queue) = adapter
+ .request_device(&wgpu::DeviceDescriptor {
+ label: Some("wgpu_atlas_test_device"),
+ required_features: wgpu::Features::empty(),
+ required_limits: wgpu::Limits::downlevel_defaults()
+ .using_resolution(adapter.limits())
+ .using_alignment(adapter.limits()),
+ memory_hints: wgpu::MemoryHints::MemoryUsage,
+ trace: wgpu::Trace::Off,
+ experimental_features: wgpu::ExperimentalFeatures::disabled(),
+ })
+ .await
+ .map_err(|error| anyhow::anyhow!("failed to request device: {error}"))?;
+ Ok((Arc::new(device), Arc::new(queue)))
+ })
+ }
+
+ #[test]
+ fn before_frame_skips_uploads_for_removed_texture() -> anyhow::Result<()> {
+ let (device, queue) = test_device_and_queue()?;
+
+ let atlas = WgpuAtlas::new(device, queue);
+ let key = AtlasKey::Image(RenderImageParams {
+ image_id: ImageId(1),
+ frame_index: 0,
+ });
+ let size = Size {
+ width: DevicePixels(1),
+ height: DevicePixels(1),
+ };
+ let mut build = || Ok(Some((size, Cow::Owned(vec![0, 0, 0, 255]))));
+
+ // Regression test: before the fix, this panicked in flush_uploads
+ atlas
+ .get_or_insert_with(&key, &mut build)?
+ .expect("tile should be created");
+ atlas.remove(&key);
+ atlas.before_frame();
+
+ Ok(())
+ }
+}
@@ -540,10 +540,9 @@ impl rwh::HasWindowHandle for WindowsWindow {
}
}
-// todo(windows)
impl rwh::HasDisplayHandle for WindowsWindow {
fn display_handle(&self) -> std::result::Result<rwh::DisplayHandle<'_>, rwh::HandleError> {
- unimplemented!()
+ Ok(rwh::DisplayHandle::windows())
}
}
@@ -38,7 +38,7 @@
"#ifndef"
"#include"
(preproc_directive)
-] @preproc
+] @keyword.preproc @preproc
[
"="
@@ -196,7 +196,7 @@ type: (primitive_type) @type.builtin
"#ifndef"
"#include"
(preproc_directive)
-] @preproc
+] @keyword.preproc @preproc
(comment) @comment
@@ -3,15 +3,13 @@
[
(addition)
(new_file)
-] @string
+] @string @diff.plus
-; TODO: This should eventually be `@diff.plus` with a fallback of `@string`
[
(deletion)
(old_file)
-] @keyword
+] @keyword @diff.minus
-; TODO: This should eventually be `@diff.minus` with a fallback of `@keyword`
(commit) @constant
(location) @attribute
@@ -328,26 +328,26 @@
; JSX elements
(jsx_opening_element
[
- (identifier) @type
+ (identifier) @type @tag.component.jsx
(member_expression
- object: (identifier) @type
- property: (property_identifier) @type)
+ object: (identifier) @type @tag.component.jsx
+ property: (property_identifier) @type @tag.component.jsx)
])
(jsx_closing_element
[
- (identifier) @type
+ (identifier) @type @tag.component.jsx
(member_expression
- object: (identifier) @type
- property: (property_identifier) @type)
+ object: (identifier) @type @tag.component.jsx
+ property: (property_identifier) @type @tag.component.jsx)
])
(jsx_self_closing_element
[
- (identifier) @type
+ (identifier) @type @tag.component.jsx
(member_expression
- object: (identifier) @type
- property: (property_identifier) @type)
+ object: (identifier) @type @tag.component.jsx
+ property: (property_identifier) @type @tag.component.jsx)
])
(jsx_opening_element
@@ -389,26 +389,26 @@
(jsx_opening_element
[
- (identifier) @type
+ (identifier) @type @tag.component.jsx
(member_expression
- object: (identifier) @type
- property: (property_identifier) @type)
+ object: (identifier) @type @tag.component.jsx
+ property: (property_identifier) @type @tag.component.jsx)
])
(jsx_closing_element
[
- (identifier) @type
+ (identifier) @type @tag.component.jsx
(member_expression
- object: (identifier) @type
- property: (property_identifier) @type)
+ object: (identifier) @type @tag.component.jsx
+ property: (property_identifier) @type @tag.component.jsx)
])
(jsx_self_closing_element
[
- (identifier) @type
+ (identifier) @type @tag.component.jsx
(member_expression
- object: (identifier) @type
- property: (property_identifier) @type)
+ object: (identifier) @type @tag.component.jsx
+ property: (property_identifier) @type @tag.component.jsx)
])
(jsx_opening_element
@@ -95,6 +95,7 @@ pub enum IconName {
DebugStepOver,
Diff,
DiffSplit,
+ DiffSplitAuto,
DiffUnified,
Disconnected,
Download,
@@ -1,7 +1,6 @@
use anyhow::{Result, anyhow};
use editor::{
- Bias, CompletionProvider, Editor, EditorEvent, EditorMode, ExcerptId, MinimapVisibility,
- MultiBuffer,
+ Bias, CompletionProvider, Editor, EditorEvent, EditorMode, MinimapVisibility, MultiBuffer,
};
use fuzzy::StringMatch;
use gpui::{
@@ -641,7 +640,6 @@ struct RustStyleCompletionProvider {
impl CompletionProvider for RustStyleCompletionProvider {
fn completions(
&self,
- _excerpt_id: ExcerptId,
buffer: &Entity<Buffer>,
position: Anchor,
_: editor::CompletionContext,
@@ -26,7 +26,6 @@ impl ActionCompletionProvider {
impl CompletionProvider for ActionCompletionProvider {
fn completions(
&self,
- _excerpt_id: editor::ExcerptId,
buffer: &Entity<language::Buffer>,
buffer_position: language::Anchor,
_trigger: editor::CompletionContext,
@@ -3480,7 +3480,6 @@ struct KeyContextCompletionProvider {
impl CompletionProvider for KeyContextCompletionProvider {
fn completions(
&self,
- _excerpt_id: editor::ExcerptId,
buffer: &Entity<language::Buffer>,
buffer_position: language::Anchor,
_trigger: editor::CompletionContext,
@@ -5549,11 +5549,11 @@ impl<'a> BufferChunks<'a> {
&& range.start >= capture.node.start_byte()
{
let next_capture_end = capture.node.end_byte();
- if range.start < next_capture_end {
- highlights.stack.push((
- next_capture_end,
- highlights.highlight_maps[capture.grammar_index].get(capture.index),
- ));
+ if range.start < next_capture_end
+ && let Some(capture_id) =
+ highlights.highlight_maps[capture.grammar_index].get(capture.index)
+ {
+ highlights.stack.push((next_capture_end, capture_id));
}
highlights.next_capture.take();
}
@@ -5688,9 +5688,11 @@ impl<'a> Iterator for BufferChunks<'a> {
} else {
let highlight_id =
highlights.highlight_maps[capture.grammar_index].get(capture.index);
- highlights
- .stack
- .push((capture.node.end_byte(), highlight_id));
+ if let Some(highlight_id) = highlight_id {
+ highlights
+ .stack
+ .push((capture.node.end_byte(), highlight_id));
+ }
highlights.next_capture = highlights.captures.next();
}
}
@@ -326,23 +326,17 @@ impl DiagnosticEntry<Anchor> {
}
}
-impl Default for Summary {
- fn default() -> Self {
- Self {
- start: Anchor::MIN,
- end: Anchor::MAX,
- min_start: Anchor::MAX,
- max_end: Anchor::MIN,
- count: 0,
- }
- }
-}
-
impl sum_tree::Summary for Summary {
type Context<'a> = &'a text::BufferSnapshot;
- fn zero(_cx: Self::Context<'_>) -> Self {
- Default::default()
+ fn zero(buffer: &text::BufferSnapshot) -> Self {
+ Self {
+ start: Anchor::min_for_buffer(buffer.remote_id()),
+ end: Anchor::max_for_buffer(buffer.remote_id()),
+ min_start: Anchor::max_for_buffer(buffer.remote_id()),
+ max_end: Anchor::min_for_buffer(buffer.remote_id()),
+ count: 0,
+ }
}
fn add_summary(&mut self, other: &Self, buffer: Self::Context<'_>) {
@@ -1023,9 +1023,7 @@ impl Language {
BufferChunks::new(text, range, Some((captures, highlight_maps)), false, None)
{
let end_offset = offset + chunk.text.len();
- if let Some(highlight_id) = chunk.syntax_highlight_id
- && !highlight_id.is_default()
- {
+ if let Some(highlight_id) = chunk.syntax_highlight_id {
result.push((offset..end_offset, highlight_id));
}
offset = end_offset;
@@ -1077,11 +1075,11 @@ impl Language {
#[inline]
pub fn build_highlight_map(capture_names: &[&str], theme: &SyntaxTheme) -> HighlightMap {
- HighlightMap::from_ids(capture_names.iter().map(|capture_name| {
- theme
- .highlight_id(capture_name)
- .map_or(HighlightId::default(), HighlightId)
- }))
+ HighlightMap::from_ids(
+ capture_names
+ .iter()
+ .map(|capture_name| theme.highlight_id(capture_name).map(HighlightId::new)),
+ )
}
impl LanguageScope {
@@ -1645,9 +1643,18 @@ mod tests {
];
let map = build_highlight_map(capture_names, &theme);
- assert_eq!(theme.get_capture_name(map.get(0)), Some("function"));
- assert_eq!(theme.get_capture_name(map.get(1)), Some("function.async"));
- assert_eq!(theme.get_capture_name(map.get(2)), Some("variable.builtin"));
+ assert_eq!(
+ theme.get_capture_name(map.get(0).unwrap()),
+ Some("function")
+ );
+ assert_eq!(
+ theme.get_capture_name(map.get(1).unwrap()),
+ Some("function.async")
+ );
+ assert_eq!(
+ theme.get_capture_name(map.get(2).unwrap()),
+ Some("variable.builtin")
+ );
}
#[gpui::test(iterations = 10)]
@@ -174,11 +174,11 @@ pub fn serialize_selection(selection: &Selection<Anchor>) -> proto::Selection {
id: selection.id as u64,
start: Some(proto::EditorAnchor {
anchor: Some(serialize_anchor(&selection.start)),
- excerpt_id: 0,
+ excerpt_id: None,
}),
end: Some(proto::EditorAnchor {
anchor: Some(serialize_anchor(&selection.end)),
- excerpt_id: 0,
+ excerpt_id: None,
}),
reversed: selection.reversed,
}
@@ -260,7 +260,7 @@ pub fn serialize_anchor(anchor: &Anchor) -> proto::Anchor {
Bias::Left => proto::Bias::Left as i32,
Bias::Right => proto::Bias::Right as i32,
},
- buffer_id: anchor.buffer_id.map(Into::into),
+ buffer_id: Some(anchor.buffer_id.into()),
}
}
@@ -498,7 +498,7 @@ pub fn deserialize_anchor(anchor: proto::Anchor) -> Option<Anchor> {
timestamp,
anchor.offset as u32,
bias,
- buffer_id,
+ buffer_id?,
))
}
@@ -18,7 +18,7 @@ use std::{
};
use streaming_iterator::StreamingIterator;
use sum_tree::{Bias, Dimensions, SeekTarget, SumTree};
-use text::{Anchor, BufferSnapshot, OffsetRangeExt, Point, Rope, ToOffset, ToPoint};
+use text::{Anchor, BufferId, BufferSnapshot, OffsetRangeExt, Point, Rope, ToOffset, ToPoint};
use tree_sitter::{
Node, Query, QueryCapture, QueryCaptures, QueryCursor, QueryMatch, QueryMatches,
QueryPredicateArg,
@@ -56,7 +56,15 @@ impl Drop for SyntaxSnapshot {
// This does allocate a new Arc, but it's cheap and avoids blocking the main thread without needing to use an `Option` or `MaybeUninit`.
let _ = DROP_TX.send(std::mem::replace(
&mut self.layers,
- SumTree::from_summary(Default::default()),
+ SumTree::from_summary(SyntaxLayerSummary {
+ min_depth: Default::default(),
+ max_depth: Default::default(),
+ // Deliberately bogus anchors, doesn't matter in this context
+ range: Anchor::min_min_range_for_buffer(BufferId::new(1).unwrap()),
+ last_layer_range: Anchor::min_min_range_for_buffer(BufferId::new(1).unwrap()),
+ last_layer_language: Default::default(),
+ contains_unknown_injections: Default::default(),
+ }),
));
}
}
@@ -588,7 +596,7 @@ impl SyntaxSnapshot {
let bounded_position = SyntaxLayerPositionBeforeChange {
position: position.clone(),
- change: changed_regions.start_position(),
+ change: changed_regions.start_position(text.remote_id()),
};
if bounded_position.cmp(cursor.start(), text).is_gt() {
let slice = cursor.slice(&bounded_position, Bias::Left);
@@ -1946,11 +1954,11 @@ impl ChangedRegion {
}
impl ChangeRegionSet {
- fn start_position(&self) -> ChangeStartPosition {
+ fn start_position(&self, buffer_id: BufferId) -> ChangeStartPosition {
self.0.first().map_or(
ChangeStartPosition {
depth: usize::MAX,
- position: Anchor::MAX,
+ position: Anchor::max_for_buffer(buffer_id),
},
|region| ChangeStartPosition {
depth: region.depth,
@@ -1999,32 +2007,28 @@ impl ChangeRegionSet {
}
}
-impl Default for SyntaxLayerSummary {
- fn default() -> Self {
+impl sum_tree::Summary for SyntaxLayerSummary {
+ type Context<'a> = &'a BufferSnapshot;
+
+ fn zero(buffer: &BufferSnapshot) -> Self {
Self {
max_depth: 0,
min_depth: 0,
- range: Anchor::MAX..Anchor::MIN,
- last_layer_range: Anchor::MIN..Anchor::MAX,
+ range: Anchor::max_for_buffer(buffer.remote_id())
+ ..Anchor::min_for_buffer(buffer.remote_id()),
+ last_layer_range: Anchor::min_for_buffer(buffer.remote_id())
+ ..Anchor::max_for_buffer(buffer.remote_id()),
last_layer_language: None,
contains_unknown_injections: false,
}
}
-}
-
-impl sum_tree::Summary for SyntaxLayerSummary {
- type Context<'a> = &'a BufferSnapshot;
-
- fn zero(_cx: &BufferSnapshot) -> Self {
- Default::default()
- }
fn add_summary(&mut self, other: &Self, buffer: Self::Context<'_>) {
if other.max_depth > self.max_depth {
self.max_depth = other.max_depth;
self.range = other.range.clone();
} else {
- if self.range == (Anchor::MAX..Anchor::MAX) {
+ if self.range.start.is_max() && self.range.end.is_max() {
self.range.start = other.range.start;
}
if other.range.end.cmp(&self.range.end, buffer).is_gt() {
@@ -275,12 +275,11 @@ impl Grammar {
}
pub fn highlight_id_for_name(&self, name: &str) -> Option<HighlightId> {
- let capture_id = self
- .highlights_config
+ self.highlights_config
.as_ref()?
.query
- .capture_index_for_name(name)?;
- Some(self.highlight_map.lock().get(capture_id))
+ .capture_index_for_name(name)
+ .and_then(|capture_id| self.highlight_map.lock().get(capture_id))
}
pub fn debug_variables_config(&self) -> Option<&DebugVariablesConfig> {
@@ -1,35 +1,35 @@
-use std::sync::Arc;
+use std::{num::NonZeroU32, sync::Arc};
#[derive(Clone, Debug)]
-pub struct HighlightMap(Arc<[HighlightId]>);
+pub struct HighlightMap(Arc<[Option<HighlightId>]>);
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
-pub struct HighlightId(pub u32);
+pub struct HighlightId(NonZeroU32);
-const DEFAULT_SYNTAX_HIGHLIGHT_ID: HighlightId = HighlightId(u32::MAX);
+impl HighlightId {
+ pub const TABSTOP_INSERT_ID: HighlightId = HighlightId(NonZeroU32::new(u32::MAX - 1).unwrap());
+ pub const TABSTOP_REPLACE_ID: HighlightId = HighlightId(NonZeroU32::new(u32::MAX - 2).unwrap());
-impl HighlightMap {
- #[inline]
- pub fn from_ids(highlight_ids: impl IntoIterator<Item = HighlightId>) -> Self {
- Self(highlight_ids.into_iter().collect())
+ pub fn new(capture_id: u32) -> Self {
+ Self(NonZeroU32::new(capture_id + 1).unwrap_or(NonZeroU32::MAX))
}
+}
- #[inline]
- pub fn get(&self, capture_id: u32) -> HighlightId {
- self.0
- .get(capture_id as usize)
- .copied()
- .unwrap_or(DEFAULT_SYNTAX_HIGHLIGHT_ID)
+impl From<HighlightId> for usize {
+ fn from(value: HighlightId) -> Self {
+ value.0.get() as usize - 1
}
}
-impl HighlightId {
- pub const TABSTOP_INSERT_ID: HighlightId = HighlightId(u32::MAX - 1);
- pub const TABSTOP_REPLACE_ID: HighlightId = HighlightId(u32::MAX - 2);
+impl HighlightMap {
+ #[inline]
+ pub fn from_ids(highlight_ids: impl IntoIterator<Item = Option<HighlightId>>) -> Self {
+ Self(highlight_ids.into_iter().collect())
+ }
#[inline]
- pub fn is_default(&self) -> bool {
- *self == DEFAULT_SYNTAX_HIGHLIGHT_ID
+ pub fn get(&self, capture_id: u32) -> Option<HighlightId> {
+ self.0.get(capture_id as usize).copied().flatten()
}
}
@@ -38,15 +38,3 @@ impl Default for HighlightMap {
Self(Arc::new([]))
}
}
-
-impl Default for HighlightId {
- fn default() -> Self {
- DEFAULT_SYNTAX_HIGHLIGHT_ID
- }
-}
-
-impl From<HighlightId> for usize {
- fn from(value: HighlightId) -> Self {
- value.0 as usize
- }
-}
@@ -684,7 +684,7 @@ fn test_build_code_label() {
);
let code_runs = code_ranges
.into_iter()
- .map(|range| (range, HighlightId(0)))
+ .map(|range| (range, HighlightId::new(0)))
.collect::<Vec<_>>();
let label = build_code_label(
@@ -707,7 +707,7 @@ fn test_build_code_label() {
marked_text_ranges("pqrs.tuv: «fn»(«Bcd»(«Efgh»)) -> «Ijklm»", false);
let label_runs = label_ranges
.into_iter()
- .map(|range| (range, HighlightId(0)))
+ .map(|range| (range, HighlightId::new(0)))
.collect::<Vec<_>>();
assert_eq!(
@@ -723,7 +723,7 @@ fn test_build_code_label_with_invalid_ranges() {
let (code, code_ranges) = marked_text_ranges("const «a»: «B» = '🏀'", false);
let code_runs = code_ranges
.into_iter()
- .map(|range| (range, HighlightId(0)))
+ .map(|range| (range, HighlightId::new(0)))
.collect::<Vec<_>>();
// A span uses a code range that is invalid because it starts inside of
@@ -20,11 +20,11 @@ anthropic = { workspace = true, features = ["schemars"] }
anyhow.workspace = true
credentials_provider.workspace = true
base64.workspace = true
-client.workspace = true
cloud_api_client.workspace = true
cloud_api_types.workspace = true
cloud_llm_client.workspace = true
collections.workspace = true
+env_var.workspace = true
futures.workspace = true
gpui.workspace = true
http_client.workspace = true
@@ -40,7 +40,6 @@ serde_json.workspace = true
smol.workspace = true
thiserror.workspace = true
util.workspace = true
-zed_env_vars.workspace = true
[dev-dependencies]
gpui = { workspace = true, features = ["test-support"] }
@@ -1,5 +1,6 @@
use anyhow::{Result, anyhow};
use credentials_provider::CredentialsProvider;
+use env_var::EnvVar;
use futures::{FutureExt, future};
use gpui::{AsyncApp, Context, SharedString, Task};
use std::{
@@ -7,7 +8,6 @@ use std::{
sync::Arc,
};
use util::ResultExt as _;
-use zed_env_vars::EnvVar;
use crate::AuthenticateError;
@@ -101,6 +101,7 @@ impl ApiKeyState {
url: SharedString,
key: Option<String>,
get_this: impl Fn(&mut Ent) -> &mut Self + 'static,
+ provider: Arc<dyn CredentialsProvider>,
cx: &Context<Ent>,
) -> Task<Result<()>> {
if self.is_from_env_var() {
@@ -108,18 +109,14 @@ impl ApiKeyState {
"bug: attempted to store API key in system keychain when API key is from env var",
)));
}
- let credentials_provider = <dyn CredentialsProvider>::global(cx);
cx.spawn(async move |ent, cx| {
if let Some(key) = &key {
- credentials_provider
+ provider
.write_credentials(&url, "Bearer", key.as_bytes(), cx)
.await
.log_err();
} else {
- credentials_provider
- .delete_credentials(&url, cx)
- .await
- .log_err();
+ provider.delete_credentials(&url, cx).await.log_err();
}
ent.update(cx, |ent, cx| {
let this = get_this(ent);
@@ -144,12 +141,13 @@ impl ApiKeyState {
&mut self,
url: SharedString,
get_this: impl Fn(&mut Ent) -> &mut Self + Clone + 'static,
+ provider: Arc<dyn CredentialsProvider>,
cx: &mut Context<Ent>,
) {
if url != self.url {
if !self.is_from_env_var() {
// loading will continue even though this result task is dropped
- let _task = self.load_if_needed(url, get_this, cx);
+ let _task = self.load_if_needed(url, get_this, provider, cx);
}
}
}
@@ -163,6 +161,7 @@ impl ApiKeyState {
&mut self,
url: SharedString,
get_this: impl Fn(&mut Ent) -> &mut Self + Clone + 'static,
+ provider: Arc<dyn CredentialsProvider>,
cx: &mut Context<Ent>,
) -> Task<Result<(), AuthenticateError>> {
if let LoadStatus::Loaded { .. } = &self.load_status
@@ -185,7 +184,7 @@ impl ApiKeyState {
let task = if let Some(load_task) = &self.load_task {
load_task.clone()
} else {
- let load_task = Self::load(url.clone(), get_this.clone(), cx).shared();
+ let load_task = Self::load(url.clone(), get_this.clone(), provider, cx).shared();
self.url = url;
self.load_status = LoadStatus::NotPresent;
self.load_task = Some(load_task.clone());
@@ -206,14 +205,13 @@ impl ApiKeyState {
fn load<Ent: 'static>(
url: SharedString,
get_this: impl Fn(&mut Ent) -> &mut Self + 'static,
+ provider: Arc<dyn CredentialsProvider>,
cx: &Context<Ent>,
) -> Task<()> {
- let credentials_provider = <dyn CredentialsProvider>::global(cx);
cx.spawn({
async move |ent, cx| {
let load_status =
- ApiKey::load_from_system_keychain_impl(&url, credentials_provider.as_ref(), cx)
- .await;
+ ApiKey::load_from_system_keychain_impl(&url, provider.as_ref(), cx).await;
ent.update(cx, |ent, cx| {
let this = get_this(ent);
this.url = url;
@@ -125,6 +125,7 @@ pub struct FakeLanguageModel {
>,
forbid_requests: AtomicBool,
supports_thinking: AtomicBool,
+ supports_streaming_tools: AtomicBool,
}
impl Default for FakeLanguageModel {
@@ -137,6 +138,7 @@ impl Default for FakeLanguageModel {
current_completion_txs: Mutex::new(Vec::new()),
forbid_requests: AtomicBool::new(false),
supports_thinking: AtomicBool::new(false),
+ supports_streaming_tools: AtomicBool::new(false),
}
}
}
@@ -169,6 +171,10 @@ impl FakeLanguageModel {
self.supports_thinking.store(supports, SeqCst);
}
+ pub fn set_supports_streaming_tools(&self, supports: bool) {
+ self.supports_streaming_tools.store(supports, SeqCst);
+ }
+
pub fn pending_completions(&self) -> Vec<LanguageModelRequest> {
self.current_completion_txs
.lock()
@@ -282,6 +288,10 @@ impl LanguageModel for FakeLanguageModel {
self.supports_thinking.load(SeqCst)
}
+ fn supports_streaming_tools(&self) -> bool {
+ self.supports_streaming_tools.load(SeqCst)
+ }
+
fn telemetry_id(&self) -> String {
"fake".to_string()
}
@@ -11,12 +11,10 @@ pub mod tool_schema;
pub mod fake_provider;
use anyhow::{Result, anyhow};
-use client::Client;
-use client::UserStore;
use cloud_llm_client::CompletionRequestStatus;
use futures::FutureExt;
use futures::{StreamExt, future::BoxFuture, stream::BoxStream};
-use gpui::{AnyView, App, AsyncApp, Entity, SharedString, Task, Window};
+use gpui::{AnyView, App, AsyncApp, SharedString, Task, Window};
use http_client::{StatusCode, http};
use icons::IconName;
use parking_lot::Mutex;
@@ -36,15 +34,10 @@ pub use crate::registry::*;
pub use crate::request::*;
pub use crate::role::*;
pub use crate::tool_schema::LanguageModelToolSchemaFormat;
+pub use env_var::{EnvVar, env_var};
pub use provider::*;
-pub use zed_env_vars::{EnvVar, env_var};
-pub fn init(user_store: Entity<UserStore>, client: Arc<Client>, cx: &mut App) {
- init_settings(cx);
- RefreshLlmTokenListener::register(client, user_store, cx);
-}
-
-pub fn init_settings(cx: &mut App) {
+pub fn init(cx: &mut App) {
registry::init(cx);
}
@@ -1,16 +1,9 @@
use std::fmt;
use std::sync::Arc;
-use anyhow::{Context as _, Result};
-use client::Client;
-use client::UserStore;
use cloud_api_client::ClientApiError;
+use cloud_api_client::CloudApiClient;
use cloud_api_types::OrganizationId;
-use cloud_api_types::websocket_protocol::MessageToClient;
-use cloud_llm_client::{EXPIRED_LLM_TOKEN_HEADER_NAME, OUTDATED_LLM_TOKEN_HEADER_NAME};
-use gpui::{
- App, AppContext as _, Context, Entity, EventEmitter, Global, ReadGlobal as _, Subscription,
-};
use smol::lock::{RwLock, RwLockUpgradableReadGuard, RwLockWriteGuard};
use thiserror::Error;
@@ -30,18 +23,12 @@ impl fmt::Display for PaymentRequiredError {
pub struct LlmApiToken(Arc<RwLock<Option<String>>>);
impl LlmApiToken {
- pub fn global(cx: &App) -> Self {
- RefreshLlmTokenListener::global(cx)
- .read(cx)
- .llm_api_token
- .clone()
- }
-
pub async fn acquire(
&self,
- client: &Arc<Client>,
+ client: &CloudApiClient,
+ system_id: Option<String>,
organization_id: Option<OrganizationId>,
- ) -> Result<String> {
+ ) -> Result<String, ClientApiError> {
let lock = self.0.upgradable_read().await;
if let Some(token) = lock.as_ref() {
Ok(token.to_string())
@@ -49,6 +36,7 @@ impl LlmApiToken {
Self::fetch(
RwLockUpgradableReadGuard::upgrade(lock).await,
client,
+ system_id,
organization_id,
)
.await
@@ -57,10 +45,11 @@ impl LlmApiToken {
pub async fn refresh(
&self,
- client: &Arc<Client>,
+ client: &CloudApiClient,
+ system_id: Option<String>,
organization_id: Option<OrganizationId>,
- ) -> Result<String> {
- Self::fetch(self.0.write().await, client, organization_id).await
+ ) -> Result<String, ClientApiError> {
+ Self::fetch(self.0.write().await, client, system_id, organization_id).await
}
/// Clears the existing token before attempting to fetch a new one.
@@ -69,28 +58,22 @@ impl LlmApiToken {
/// leave a token for the wrong organization.
pub async fn clear_and_refresh(
&self,
- client: &Arc<Client>,
+ client: &CloudApiClient,
+ system_id: Option<String>,
organization_id: Option<OrganizationId>,
- ) -> Result<String> {
+ ) -> Result<String, ClientApiError> {
let mut lock = self.0.write().await;
*lock = None;
- Self::fetch(lock, client, organization_id).await
+ Self::fetch(lock, client, system_id, organization_id).await
}
async fn fetch(
mut lock: RwLockWriteGuard<'_, Option<String>>,
- client: &Arc<Client>,
+ client: &CloudApiClient,
+ system_id: Option<String>,
organization_id: Option<OrganizationId>,
- ) -> Result<String> {
- let system_id = client
- .telemetry()
- .system_id()
- .map(|system_id| system_id.to_string());
-
- let result = client
- .cloud_client()
- .create_llm_token(system_id, organization_id)
- .await;
+ ) -> Result<String, ClientApiError> {
+ let result = client.create_llm_token(system_id, organization_id).await;
match result {
Ok(response) => {
*lock = Some(response.token.0.clone());
@@ -98,112 +81,7 @@ impl LlmApiToken {
}
Err(err) => {
*lock = None;
- match err {
- ClientApiError::Unauthorized => {
- client.request_sign_out();
- Err(err).context("Failed to create LLM token")
- }
- ClientApiError::Other(err) => Err(err),
- }
- }
- }
- }
-}
-
-pub trait NeedsLlmTokenRefresh {
- /// Returns whether the LLM token needs to be refreshed.
- fn needs_llm_token_refresh(&self) -> bool;
-}
-
-impl NeedsLlmTokenRefresh for http_client::Response<http_client::AsyncBody> {
- fn needs_llm_token_refresh(&self) -> bool {
- self.headers().get(EXPIRED_LLM_TOKEN_HEADER_NAME).is_some()
- || self.headers().get(OUTDATED_LLM_TOKEN_HEADER_NAME).is_some()
- }
-}
-
-enum TokenRefreshMode {
- Refresh,
- ClearAndRefresh,
-}
-
-struct GlobalRefreshLlmTokenListener(Entity<RefreshLlmTokenListener>);
-
-impl Global for GlobalRefreshLlmTokenListener {}
-
-pub struct LlmTokenRefreshedEvent;
-
-pub struct RefreshLlmTokenListener {
- client: Arc<Client>,
- user_store: Entity<UserStore>,
- llm_api_token: LlmApiToken,
- _subscription: Subscription,
-}
-
-impl EventEmitter<LlmTokenRefreshedEvent> for RefreshLlmTokenListener {}
-
-impl RefreshLlmTokenListener {
- pub fn register(client: Arc<Client>, user_store: Entity<UserStore>, cx: &mut App) {
- let listener = cx.new(|cx| RefreshLlmTokenListener::new(client, user_store, cx));
- cx.set_global(GlobalRefreshLlmTokenListener(listener));
- }
-
- pub fn global(cx: &App) -> Entity<Self> {
- GlobalRefreshLlmTokenListener::global(cx).0.clone()
- }
-
- fn new(client: Arc<Client>, user_store: Entity<UserStore>, cx: &mut Context<Self>) -> Self {
- client.add_message_to_client_handler({
- let this = cx.weak_entity();
- move |message, cx| {
- if let Some(this) = this.upgrade() {
- Self::handle_refresh_llm_token(this, message, cx);
- }
- }
- });
-
- let subscription = cx.subscribe(&user_store, |this, _user_store, event, cx| {
- if matches!(event, client::user::Event::OrganizationChanged) {
- this.refresh(TokenRefreshMode::ClearAndRefresh, cx);
- }
- });
-
- Self {
- client,
- user_store,
- llm_api_token: LlmApiToken::default(),
- _subscription: subscription,
- }
- }
-
- fn refresh(&self, mode: TokenRefreshMode, cx: &mut Context<Self>) {
- let client = self.client.clone();
- let llm_api_token = self.llm_api_token.clone();
- let organization_id = self
- .user_store
- .read(cx)
- .current_organization()
- .map(|organization| organization.id.clone());
- cx.spawn(async move |this, cx| {
- match mode {
- TokenRefreshMode::Refresh => {
- llm_api_token.refresh(&client, organization_id).await?;
- }
- TokenRefreshMode::ClearAndRefresh => {
- llm_api_token
- .clear_and_refresh(&client, organization_id)
- .await?;
- }
- }
- this.update(cx, |_this, cx| cx.emit(LlmTokenRefreshedEvent))
- })
- .detach_and_log_err(cx);
- }
-
- fn handle_refresh_llm_token(this: Entity<Self>, message: &MessageToClient, cx: &mut App) {
- match message {
- MessageToClient::UserUpdated => {
- this.update(cx, |this, cx| this.refresh(TokenRefreshMode::Refresh, cx));
+ Err(err)
}
}
}
@@ -3,6 +3,7 @@ use std::sync::Arc;
use ::settings::{Settings, SettingsStore};
use client::{Client, UserStore};
use collections::HashSet;
+use credentials_provider::CredentialsProvider;
use gpui::{App, Context, Entity};
use language_model::{LanguageModelProviderId, LanguageModelRegistry};
use provider::deepseek::DeepSeekLanguageModelProvider;
@@ -31,9 +32,16 @@ use crate::provider::x_ai::XAiLanguageModelProvider;
pub use crate::settings::*;
pub fn init(user_store: Entity<UserStore>, client: Arc<Client>, cx: &mut App) {
+ let credentials_provider = client.credentials_provider();
let registry = LanguageModelRegistry::global(cx);
registry.update(cx, |registry, cx| {
- register_language_model_providers(registry, user_store, client.clone(), cx);
+ register_language_model_providers(
+ registry,
+ user_store,
+ client.clone(),
+ credentials_provider.clone(),
+ cx,
+ );
});
// Subscribe to extension store events to track LLM extension installations
@@ -104,6 +112,7 @@ pub fn init(user_store: Entity<UserStore>, client: Arc<Client>, cx: &mut App) {
&HashSet::default(),
&openai_compatible_providers,
client.clone(),
+ credentials_provider.clone(),
cx,
);
});
@@ -124,6 +133,7 @@ pub fn init(user_store: Entity<UserStore>, client: Arc<Client>, cx: &mut App) {
&openai_compatible_providers,
&openai_compatible_providers_new,
client.clone(),
+ credentials_provider.clone(),
cx,
);
});
@@ -138,6 +148,7 @@ fn register_openai_compatible_providers(
old: &HashSet<Arc<str>>,
new: &HashSet<Arc<str>>,
client: Arc<Client>,
+ credentials_provider: Arc<dyn CredentialsProvider>,
cx: &mut Context<LanguageModelRegistry>,
) {
for provider_id in old {
@@ -152,6 +163,7 @@ fn register_openai_compatible_providers(
Arc::new(OpenAiCompatibleLanguageModelProvider::new(
provider_id.clone(),
client.http_client(),
+ credentials_provider.clone(),
cx,
)),
cx,
@@ -164,6 +176,7 @@ fn register_language_model_providers(
registry: &mut LanguageModelRegistry,
user_store: Entity<UserStore>,
client: Arc<Client>,
+ credentials_provider: Arc<dyn CredentialsProvider>,
cx: &mut Context<LanguageModelRegistry>,
) {
registry.register_provider(
@@ -177,62 +190,105 @@ fn register_language_model_providers(
registry.register_provider(
Arc::new(AnthropicLanguageModelProvider::new(
client.http_client(),
+ credentials_provider.clone(),
cx,
)),
cx,
);
registry.register_provider(
- Arc::new(OpenAiLanguageModelProvider::new(client.http_client(), cx)),
+ Arc::new(OpenAiLanguageModelProvider::new(
+ client.http_client(),
+ credentials_provider.clone(),
+ cx,
+ )),
cx,
);
registry.register_provider(
- Arc::new(OllamaLanguageModelProvider::new(client.http_client(), cx)),
+ Arc::new(OllamaLanguageModelProvider::new(
+ client.http_client(),
+ credentials_provider.clone(),
+ cx,
+ )),
cx,
);
registry.register_provider(
- Arc::new(LmStudioLanguageModelProvider::new(client.http_client(), cx)),
+ Arc::new(LmStudioLanguageModelProvider::new(
+ client.http_client(),
+ credentials_provider.clone(),
+ cx,
+ )),
cx,
);
registry.register_provider(
- Arc::new(DeepSeekLanguageModelProvider::new(client.http_client(), cx)),
+ Arc::new(DeepSeekLanguageModelProvider::new(
+ client.http_client(),
+ credentials_provider.clone(),
+ cx,
+ )),
cx,
);
registry.register_provider(
- Arc::new(GoogleLanguageModelProvider::new(client.http_client(), cx)),
+ Arc::new(GoogleLanguageModelProvider::new(
+ client.http_client(),
+ credentials_provider.clone(),
+ cx,
+ )),
cx,
);
registry.register_provider(
- MistralLanguageModelProvider::global(client.http_client(), cx),
+ MistralLanguageModelProvider::global(
+ client.http_client(),
+ credentials_provider.clone(),
+ cx,
+ ),
cx,
);
registry.register_provider(
- Arc::new(BedrockLanguageModelProvider::new(client.http_client(), cx)),
+ Arc::new(BedrockLanguageModelProvider::new(
+ client.http_client(),
+ credentials_provider.clone(),
+ cx,
+ )),
cx,
);
registry.register_provider(
Arc::new(OpenRouterLanguageModelProvider::new(
client.http_client(),
+ credentials_provider.clone(),
cx,
)),
cx,
);
registry.register_provider(
- Arc::new(VercelLanguageModelProvider::new(client.http_client(), cx)),
+ Arc::new(VercelLanguageModelProvider::new(
+ client.http_client(),
+ credentials_provider.clone(),
+ cx,
+ )),
cx,
);
registry.register_provider(
Arc::new(VercelAiGatewayLanguageModelProvider::new(
client.http_client(),
+ credentials_provider.clone(),
cx,
)),
cx,
);
registry.register_provider(
- Arc::new(XAiLanguageModelProvider::new(client.http_client(), cx)),
+ Arc::new(XAiLanguageModelProvider::new(
+ client.http_client(),
+ credentials_provider.clone(),
+ cx,
+ )),
cx,
);
registry.register_provider(
- Arc::new(OpenCodeLanguageModelProvider::new(client.http_client(), cx)),
+ Arc::new(OpenCodeLanguageModelProvider::new(
+ client.http_client(),
+ credentials_provider,
+ cx,
+ )),
cx,
);
registry.register_provider(Arc::new(CopilotChatLanguageModelProvider::new(cx)), cx);
@@ -6,6 +6,7 @@ use anthropic::{
};
use anyhow::Result;
use collections::{BTreeMap, HashMap};
+use credentials_provider::CredentialsProvider;
use futures::{FutureExt, Stream, StreamExt, future::BoxFuture, stream::BoxStream};
use gpui::{AnyView, App, AsyncApp, Context, Entity, Task};
use http_client::HttpClient;
@@ -51,6 +52,7 @@ static API_KEY_ENV_VAR: LazyLock<EnvVar> = env_var!(API_KEY_ENV_VAR_NAME);
pub struct State {
api_key_state: ApiKeyState,
+ credentials_provider: Arc<dyn CredentialsProvider>,
}
impl State {
@@ -59,30 +61,51 @@ impl State {
}
fn set_api_key(&mut self, api_key: Option<String>, cx: &mut Context<Self>) -> Task<Result<()>> {
+ let credentials_provider = self.credentials_provider.clone();
let api_url = AnthropicLanguageModelProvider::api_url(cx);
- self.api_key_state
- .store(api_url, api_key, |this| &mut this.api_key_state, cx)
+ self.api_key_state.store(
+ api_url,
+ api_key,
+ |this| &mut this.api_key_state,
+ credentials_provider,
+ cx,
+ )
}
fn authenticate(&mut self, cx: &mut Context<Self>) -> Task<Result<(), AuthenticateError>> {
+ let credentials_provider = self.credentials_provider.clone();
let api_url = AnthropicLanguageModelProvider::api_url(cx);
- self.api_key_state
- .load_if_needed(api_url, |this| &mut this.api_key_state, cx)
+ self.api_key_state.load_if_needed(
+ api_url,
+ |this| &mut this.api_key_state,
+ credentials_provider,
+ cx,
+ )
}
}
impl AnthropicLanguageModelProvider {
- pub fn new(http_client: Arc<dyn HttpClient>, cx: &mut App) -> Self {
+ pub fn new(
+ http_client: Arc<dyn HttpClient>,
+ credentials_provider: Arc<dyn CredentialsProvider>,
+ cx: &mut App,
+ ) -> Self {
let state = cx.new(|cx| {
cx.observe_global::<SettingsStore>(|this: &mut State, cx| {
+ let credentials_provider = this.credentials_provider.clone();
let api_url = Self::api_url(cx);
- this.api_key_state
- .handle_url_change(api_url, |this| &mut this.api_key_state, cx);
+ this.api_key_state.handle_url_change(
+ api_url,
+ |this| &mut this.api_key_state,
+ credentials_provider,
+ cx,
+ );
cx.notify();
})
.detach();
State {
api_key_state: ApiKeyState::new(Self::api_url(cx), (*API_KEY_ENV_VAR).clone()),
+ credentials_provider,
}
});
@@ -195,12 +195,13 @@ pub struct State {
settings: Option<AmazonBedrockSettings>,
/// Whether credentials came from environment variables (only relevant for static credentials)
credentials_from_env: bool,
+ credentials_provider: Arc<dyn CredentialsProvider>,
_subscription: Subscription,
}
impl State {
fn reset_auth(&self, cx: &mut Context<Self>) -> Task<Result<()>> {
- let credentials_provider = <dyn CredentialsProvider>::global(cx);
+ let credentials_provider = self.credentials_provider.clone();
cx.spawn(async move |this, cx| {
credentials_provider
.delete_credentials(AMAZON_AWS_URL, cx)
@@ -220,7 +221,7 @@ impl State {
cx: &mut Context<Self>,
) -> Task<Result<()>> {
let auth = credentials.clone().into_auth();
- let credentials_provider = <dyn CredentialsProvider>::global(cx);
+ let credentials_provider = self.credentials_provider.clone();
cx.spawn(async move |this, cx| {
credentials_provider
.write_credentials(
@@ -287,7 +288,7 @@ impl State {
&self,
cx: &mut Context<Self>,
) -> Task<Result<(), AuthenticateError>> {
- let credentials_provider = <dyn CredentialsProvider>::global(cx);
+ let credentials_provider = self.credentials_provider.clone();
cx.spawn(async move |this, cx| {
// Try environment variables first
let (auth, from_env) = if let Some(bearer_token) = &ZED_BEDROCK_BEARER_TOKEN_VAR.value {
@@ -400,11 +401,16 @@ pub struct BedrockLanguageModelProvider {
}
impl BedrockLanguageModelProvider {
- pub fn new(http_client: Arc<dyn HttpClient>, cx: &mut App) -> Self {
+ pub fn new(
+ http_client: Arc<dyn HttpClient>,
+ credentials_provider: Arc<dyn CredentialsProvider>,
+ cx: &mut App,
+ ) -> Self {
let state = cx.new(|cx| State {
auth: None,
settings: Some(AllLanguageModelSettings::get_global(cx).bedrock.clone()),
credentials_from_env: false,
+ credentials_provider,
_subscription: cx.observe_global::<SettingsStore>(|_, cx| {
cx.notify();
}),
@@ -1,7 +1,9 @@
use ai_onboarding::YoungAccountBanner;
use anthropic::AnthropicModelMode;
use anyhow::{Context as _, Result, anyhow};
-use client::{Client, UserStore, zed_urls};
+use client::{
+ Client, NeedsLlmTokenRefresh, RefreshLlmTokenListener, UserStore, global_llm_token, zed_urls,
+};
use cloud_api_types::{OrganizationId, Plan};
use cloud_llm_client::{
CLIENT_SUPPORTS_STATUS_MESSAGES_HEADER_NAME, CLIENT_SUPPORTS_STATUS_STREAM_ENDED_HEADER_NAME,
@@ -24,10 +26,9 @@ use language_model::{
LanguageModelCompletionError, LanguageModelCompletionEvent, LanguageModelEffortLevel,
LanguageModelId, LanguageModelName, LanguageModelProvider, LanguageModelProviderId,
LanguageModelProviderName, LanguageModelProviderState, LanguageModelRequest,
- LanguageModelToolChoice, LanguageModelToolSchemaFormat, LlmApiToken, NeedsLlmTokenRefresh,
- OPEN_AI_PROVIDER_ID, OPEN_AI_PROVIDER_NAME, PaymentRequiredError, RateLimiter,
- RefreshLlmTokenListener, X_AI_PROVIDER_ID, X_AI_PROVIDER_NAME, ZED_CLOUD_PROVIDER_ID,
- ZED_CLOUD_PROVIDER_NAME,
+ LanguageModelToolChoice, LanguageModelToolSchemaFormat, LlmApiToken, OPEN_AI_PROVIDER_ID,
+ OPEN_AI_PROVIDER_NAME, PaymentRequiredError, RateLimiter, X_AI_PROVIDER_ID, X_AI_PROVIDER_NAME,
+ ZED_CLOUD_PROVIDER_ID, ZED_CLOUD_PROVIDER_NAME,
};
use release_channel::AppVersion;
use schemars::JsonSchema;
@@ -111,7 +112,7 @@ impl State {
cx: &mut Context<Self>,
) -> Self {
let refresh_llm_token_listener = RefreshLlmTokenListener::global(cx);
- let llm_api_token = LlmApiToken::global(cx);
+ let llm_api_token = global_llm_token(cx);
Self {
client: client.clone(),
llm_api_token,
@@ -226,7 +227,9 @@ impl State {
organization_id: Option<OrganizationId>,
) -> Result<ListModelsResponse> {
let http_client = &client.http_client();
- let token = llm_api_token.acquire(&client, organization_id).await?;
+ let token = client
+ .acquire_llm_token(&llm_api_token, organization_id)
+ .await?;
let request = http_client::Request::builder()
.method(Method::GET)
@@ -414,8 +417,8 @@ impl CloudLanguageModel {
) -> Result<PerformLlmCompletionResponse> {
let http_client = &client.http_client();
- let mut token = llm_api_token
- .acquire(&client, organization_id.clone())
+ let mut token = client
+ .acquire_llm_token(&llm_api_token, organization_id.clone())
.await?;
let mut refreshed_token = false;
@@ -447,8 +450,8 @@ impl CloudLanguageModel {
}
if !refreshed_token && response.needs_llm_token_refresh() {
- token = llm_api_token
- .refresh(&client, organization_id.clone())
+ token = client
+ .refresh_llm_token(&llm_api_token, organization_id.clone())
.await?;
refreshed_token = true;
continue;
@@ -713,7 +716,9 @@ impl LanguageModel for CloudLanguageModel {
into_google(request, model_id.clone(), GoogleModelMode::Default);
async move {
let http_client = &client.http_client();
- let token = llm_api_token.acquire(&client, organization_id).await?;
+ let token = client
+ .acquire_llm_token(&llm_api_token, organization_id)
+ .await?;
let request_body = CountTokensBody {
provider: cloud_llm_client::LanguageModelProvider::Google,
@@ -1,5 +1,6 @@
use anyhow::{Result, anyhow};
use collections::{BTreeMap, HashMap};
+use credentials_provider::CredentialsProvider;
use deepseek::DEEPSEEK_API_URL;
use futures::Stream;
@@ -49,6 +50,7 @@ pub struct DeepSeekLanguageModelProvider {
pub struct State {
api_key_state: ApiKeyState,
+ credentials_provider: Arc<dyn CredentialsProvider>,
}
impl State {
@@ -57,30 +59,51 @@ impl State {
}
fn set_api_key(&mut self, api_key: Option<String>, cx: &mut Context<Self>) -> Task<Result<()>> {
+ let credentials_provider = self.credentials_provider.clone();
let api_url = DeepSeekLanguageModelProvider::api_url(cx);
- self.api_key_state
- .store(api_url, api_key, |this| &mut this.api_key_state, cx)
+ self.api_key_state.store(
+ api_url,
+ api_key,
+ |this| &mut this.api_key_state,
+ credentials_provider,
+ cx,
+ )
}
fn authenticate(&mut self, cx: &mut Context<Self>) -> Task<Result<(), AuthenticateError>> {
+ let credentials_provider = self.credentials_provider.clone();
let api_url = DeepSeekLanguageModelProvider::api_url(cx);
- self.api_key_state
- .load_if_needed(api_url, |this| &mut this.api_key_state, cx)
+ self.api_key_state.load_if_needed(
+ api_url,
+ |this| &mut this.api_key_state,
+ credentials_provider,
+ cx,
+ )
}
}
impl DeepSeekLanguageModelProvider {
- pub fn new(http_client: Arc<dyn HttpClient>, cx: &mut App) -> Self {
+ pub fn new(
+ http_client: Arc<dyn HttpClient>,
+ credentials_provider: Arc<dyn CredentialsProvider>,
+ cx: &mut App,
+ ) -> Self {
let state = cx.new(|cx| {
cx.observe_global::<SettingsStore>(|this: &mut State, cx| {
+ let credentials_provider = this.credentials_provider.clone();
let api_url = Self::api_url(cx);
- this.api_key_state
- .handle_url_change(api_url, |this| &mut this.api_key_state, cx);
+ this.api_key_state.handle_url_change(
+ api_url,
+ |this| &mut this.api_key_state,
+ credentials_provider,
+ cx,
+ );
cx.notify();
})
.detach();
State {
api_key_state: ApiKeyState::new(Self::api_url(cx), (*API_KEY_ENV_VAR).clone()),
+ credentials_provider,
}
});
@@ -1,5 +1,6 @@
use anyhow::{Context as _, Result};
use collections::BTreeMap;
+use credentials_provider::CredentialsProvider;
use futures::{FutureExt, Stream, StreamExt, future::BoxFuture};
use google_ai::{
FunctionDeclaration, GenerateContentResponse, GoogleModelMode, Part, SystemInstruction,
@@ -60,6 +61,7 @@ pub struct GoogleLanguageModelProvider {
pub struct State {
api_key_state: ApiKeyState,
+ credentials_provider: Arc<dyn CredentialsProvider>,
}
const GEMINI_API_KEY_VAR_NAME: &str = "GEMINI_API_KEY";
@@ -76,30 +78,51 @@ impl State {
}
fn set_api_key(&mut self, api_key: Option<String>, cx: &mut Context<Self>) -> Task<Result<()>> {
+ let credentials_provider = self.credentials_provider.clone();
let api_url = GoogleLanguageModelProvider::api_url(cx);
- self.api_key_state
- .store(api_url, api_key, |this| &mut this.api_key_state, cx)
+ self.api_key_state.store(
+ api_url,
+ api_key,
+ |this| &mut this.api_key_state,
+ credentials_provider,
+ cx,
+ )
}
fn authenticate(&mut self, cx: &mut Context<Self>) -> Task<Result<(), AuthenticateError>> {
+ let credentials_provider = self.credentials_provider.clone();
let api_url = GoogleLanguageModelProvider::api_url(cx);
- self.api_key_state
- .load_if_needed(api_url, |this| &mut this.api_key_state, cx)
+ self.api_key_state.load_if_needed(
+ api_url,
+ |this| &mut this.api_key_state,
+ credentials_provider,
+ cx,
+ )
}
}
impl GoogleLanguageModelProvider {
- pub fn new(http_client: Arc<dyn HttpClient>, cx: &mut App) -> Self {
+ pub fn new(
+ http_client: Arc<dyn HttpClient>,
+ credentials_provider: Arc<dyn CredentialsProvider>,
+ cx: &mut App,
+ ) -> Self {
let state = cx.new(|cx| {
cx.observe_global::<SettingsStore>(|this: &mut State, cx| {
+ let credentials_provider = this.credentials_provider.clone();
let api_url = Self::api_url(cx);
- this.api_key_state
- .handle_url_change(api_url, |this| &mut this.api_key_state, cx);
+ this.api_key_state.handle_url_change(
+ api_url,
+ |this| &mut this.api_key_state,
+ credentials_provider,
+ cx,
+ );
cx.notify();
})
.detach();
State {
api_key_state: ApiKeyState::new(Self::api_url(cx), (*API_KEY_ENV_VAR).clone()),
+ credentials_provider,
}
});
@@ -1,5 +1,6 @@
use anyhow::{Result, anyhow};
use collections::HashMap;
+use credentials_provider::CredentialsProvider;
use fs::Fs;
use futures::Stream;
use futures::{FutureExt, StreamExt, future::BoxFuture, stream::BoxStream};
@@ -52,6 +53,7 @@ pub struct LmStudioLanguageModelProvider {
pub struct State {
api_key_state: ApiKeyState,
+ credentials_provider: Arc<dyn CredentialsProvider>,
http_client: Arc<dyn HttpClient>,
available_models: Vec<lmstudio::Model>,
fetch_model_task: Option<Task<Result<()>>>,
@@ -64,10 +66,15 @@ impl State {
}
fn set_api_key(&mut self, api_key: Option<String>, cx: &mut Context<Self>) -> Task<Result<()>> {
+ let credentials_provider = self.credentials_provider.clone();
let api_url = LmStudioLanguageModelProvider::api_url(cx).into();
- let task = self
- .api_key_state
- .store(api_url, api_key, |this| &mut this.api_key_state, cx);
+ let task = self.api_key_state.store(
+ api_url,
+ api_key,
+ |this| &mut this.api_key_state,
+ credentials_provider,
+ cx,
+ );
self.restart_fetch_models_task(cx);
task
}
@@ -114,10 +121,14 @@ impl State {
}
fn authenticate(&mut self, cx: &mut Context<Self>) -> Task<Result<(), AuthenticateError>> {
+ let credentials_provider = self.credentials_provider.clone();
let api_url = LmStudioLanguageModelProvider::api_url(cx).into();
- let _task = self
- .api_key_state
- .load_if_needed(api_url, |this| &mut this.api_key_state, cx);
+ let _task = self.api_key_state.load_if_needed(
+ api_url,
+ |this| &mut this.api_key_state,
+ credentials_provider,
+ cx,
+ );
if self.is_authenticated() {
return Task::ready(Ok(()));
@@ -152,16 +163,29 @@ impl State {
}
impl LmStudioLanguageModelProvider {
- pub fn new(http_client: Arc<dyn HttpClient>, cx: &mut App) -> Self {
+ pub fn new(
+ http_client: Arc<dyn HttpClient>,
+ credentials_provider: Arc<dyn CredentialsProvider>,
+ cx: &mut App,
+ ) -> Self {
let this = Self {
http_client: http_client.clone(),
state: cx.new(|cx| {
let subscription = cx.observe_global::<SettingsStore>({
let mut settings = AllLanguageModelSettings::get_global(cx).lmstudio.clone();
move |this: &mut State, cx| {
- let new_settings = &AllLanguageModelSettings::get_global(cx).lmstudio;
- if &settings != new_settings {
- settings = new_settings.clone();
+ let new_settings =
+ AllLanguageModelSettings::get_global(cx).lmstudio.clone();
+ if settings != new_settings {
+ let credentials_provider = this.credentials_provider.clone();
+ let api_url = Self::api_url(cx).into();
+ this.api_key_state.handle_url_change(
+ api_url,
+ |this| &mut this.api_key_state,
+ credentials_provider,
+ cx,
+ );
+ settings = new_settings;
this.restart_fetch_models_task(cx);
cx.notify();
}
@@ -173,6 +197,7 @@ impl LmStudioLanguageModelProvider {
Self::api_url(cx).into(),
(*API_KEY_ENV_VAR).clone(),
),
+ credentials_provider,
http_client,
available_models: Default::default(),
fetch_model_task: None,
@@ -1,5 +1,6 @@
use anyhow::{Result, anyhow};
use collections::BTreeMap;
+use credentials_provider::CredentialsProvider;
use futures::{FutureExt, Stream, StreamExt, future::BoxFuture, stream::BoxStream};
use gpui::{AnyView, App, AsyncApp, Context, Entity, Global, SharedString, Task, Window};
@@ -43,6 +44,7 @@ pub struct MistralLanguageModelProvider {
pub struct State {
api_key_state: ApiKeyState,
+ credentials_provider: Arc<dyn CredentialsProvider>,
}
impl State {
@@ -51,15 +53,26 @@ impl State {
}
fn set_api_key(&mut self, api_key: Option<String>, cx: &mut Context<Self>) -> Task<Result<()>> {
+ let credentials_provider = self.credentials_provider.clone();
let api_url = MistralLanguageModelProvider::api_url(cx);
- self.api_key_state
- .store(api_url, api_key, |this| &mut this.api_key_state, cx)
+ self.api_key_state.store(
+ api_url,
+ api_key,
+ |this| &mut this.api_key_state,
+ credentials_provider,
+ cx,
+ )
}
fn authenticate(&mut self, cx: &mut Context<Self>) -> Task<Result<(), AuthenticateError>> {
+ let credentials_provider = self.credentials_provider.clone();
let api_url = MistralLanguageModelProvider::api_url(cx);
- self.api_key_state
- .load_if_needed(api_url, |this| &mut this.api_key_state, cx)
+ self.api_key_state.load_if_needed(
+ api_url,
+ |this| &mut this.api_key_state,
+ credentials_provider,
+ cx,
+ )
}
}
@@ -73,20 +86,30 @@ impl MistralLanguageModelProvider {
.map(|this| &this.0)
}
- pub fn global(http_client: Arc<dyn HttpClient>, cx: &mut App) -> Arc<Self> {
+ pub fn global(
+ http_client: Arc<dyn HttpClient>,
+ credentials_provider: Arc<dyn CredentialsProvider>,
+ cx: &mut App,
+ ) -> Arc<Self> {
if let Some(this) = cx.try_global::<GlobalMistralLanguageModelProvider>() {
return this.0.clone();
}
let state = cx.new(|cx| {
cx.observe_global::<SettingsStore>(|this: &mut State, cx| {
+ let credentials_provider = this.credentials_provider.clone();
let api_url = Self::api_url(cx);
- this.api_key_state
- .handle_url_change(api_url, |this| &mut this.api_key_state, cx);
+ this.api_key_state.handle_url_change(
+ api_url,
+ |this| &mut this.api_key_state,
+ credentials_provider,
+ cx,
+ );
cx.notify();
})
.detach();
State {
api_key_state: ApiKeyState::new(Self::api_url(cx), (*API_KEY_ENV_VAR).clone()),
+ credentials_provider,
}
});
@@ -1,4 +1,5 @@
use anyhow::{Result, anyhow};
+use credentials_provider::CredentialsProvider;
use fs::Fs;
use futures::{FutureExt, StreamExt, future::BoxFuture, stream::BoxStream};
use futures::{Stream, TryFutureExt, stream};
@@ -54,6 +55,7 @@ pub struct OllamaLanguageModelProvider {
pub struct State {
api_key_state: ApiKeyState,
+ credentials_provider: Arc<dyn CredentialsProvider>,
http_client: Arc<dyn HttpClient>,
fetched_models: Vec<ollama::Model>,
fetch_model_task: Option<Task<Result<()>>>,
@@ -65,10 +67,15 @@ impl State {
}
fn set_api_key(&mut self, api_key: Option<String>, cx: &mut Context<Self>) -> Task<Result<()>> {
+ let credentials_provider = self.credentials_provider.clone();
let api_url = OllamaLanguageModelProvider::api_url(cx);
- let task = self
- .api_key_state
- .store(api_url, api_key, |this| &mut this.api_key_state, cx);
+ let task = self.api_key_state.store(
+ api_url,
+ api_key,
+ |this| &mut this.api_key_state,
+ credentials_provider,
+ cx,
+ );
self.fetched_models.clear();
cx.spawn(async move |this, cx| {
@@ -80,10 +87,14 @@ impl State {
}
fn authenticate(&mut self, cx: &mut Context<Self>) -> Task<Result<(), AuthenticateError>> {
+ let credentials_provider = self.credentials_provider.clone();
let api_url = OllamaLanguageModelProvider::api_url(cx);
- let task = self
- .api_key_state
- .load_if_needed(api_url, |this| &mut this.api_key_state, cx);
+ let task = self.api_key_state.load_if_needed(
+ api_url,
+ |this| &mut this.api_key_state,
+ credentials_provider,
+ cx,
+ );
// Always try to fetch models - if no API key is needed (local Ollama), it will work
// If API key is needed and provided, it will work
@@ -157,7 +168,11 @@ impl State {
}
impl OllamaLanguageModelProvider {
- pub fn new(http_client: Arc<dyn HttpClient>, cx: &mut App) -> Self {
+ pub fn new(
+ http_client: Arc<dyn HttpClient>,
+ credentials_provider: Arc<dyn CredentialsProvider>,
+ cx: &mut App,
+ ) -> Self {
let this = Self {
http_client: http_client.clone(),
state: cx.new(|cx| {
@@ -170,6 +185,14 @@ impl OllamaLanguageModelProvider {
let url_changed = last_settings.api_url != current_settings.api_url;
last_settings = current_settings.clone();
if url_changed {
+ let credentials_provider = this.credentials_provider.clone();
+ let api_url = Self::api_url(cx);
+ this.api_key_state.handle_url_change(
+ api_url,
+ |this| &mut this.api_key_state,
+ credentials_provider,
+ cx,
+ );
this.fetched_models.clear();
this.authenticate(cx).detach();
}
@@ -184,6 +207,7 @@ impl OllamaLanguageModelProvider {
fetched_models: Default::default(),
fetch_model_task: None,
api_key_state: ApiKeyState::new(Self::api_url(cx), (*API_KEY_ENV_VAR).clone()),
+ credentials_provider,
}
}),
};
@@ -1,5 +1,6 @@
use anyhow::{Result, anyhow};
use collections::{BTreeMap, HashMap};
+use credentials_provider::CredentialsProvider;
use futures::Stream;
use futures::{FutureExt, StreamExt, future::BoxFuture};
use gpui::{AnyView, App, AsyncApp, Context, Entity, SharedString, Task, Window};
@@ -55,6 +56,7 @@ pub struct OpenAiLanguageModelProvider {
pub struct State {
api_key_state: ApiKeyState,
+ credentials_provider: Arc<dyn CredentialsProvider>,
}
impl State {
@@ -63,30 +65,51 @@ impl State {
}
fn set_api_key(&mut self, api_key: Option<String>, cx: &mut Context<Self>) -> Task<Result<()>> {
+ let credentials_provider = self.credentials_provider.clone();
let api_url = OpenAiLanguageModelProvider::api_url(cx);
- self.api_key_state
- .store(api_url, api_key, |this| &mut this.api_key_state, cx)
+ self.api_key_state.store(
+ api_url,
+ api_key,
+ |this| &mut this.api_key_state,
+ credentials_provider,
+ cx,
+ )
}
fn authenticate(&mut self, cx: &mut Context<Self>) -> Task<Result<(), AuthenticateError>> {
+ let credentials_provider = self.credentials_provider.clone();
let api_url = OpenAiLanguageModelProvider::api_url(cx);
- self.api_key_state
- .load_if_needed(api_url, |this| &mut this.api_key_state, cx)
+ self.api_key_state.load_if_needed(
+ api_url,
+ |this| &mut this.api_key_state,
+ credentials_provider,
+ cx,
+ )
}
}
impl OpenAiLanguageModelProvider {
- pub fn new(http_client: Arc<dyn HttpClient>, cx: &mut App) -> Self {
+ pub fn new(
+ http_client: Arc<dyn HttpClient>,
+ credentials_provider: Arc<dyn CredentialsProvider>,
+ cx: &mut App,
+ ) -> Self {
let state = cx.new(|cx| {
cx.observe_global::<SettingsStore>(|this: &mut State, cx| {
+ let credentials_provider = this.credentials_provider.clone();
let api_url = Self::api_url(cx);
- this.api_key_state
- .handle_url_change(api_url, |this| &mut this.api_key_state, cx);
+ this.api_key_state.handle_url_change(
+ api_url,
+ |this| &mut this.api_key_state,
+ credentials_provider,
+ cx,
+ );
cx.notify();
})
.detach();
State {
api_key_state: ApiKeyState::new(Self::api_url(cx), (*API_KEY_ENV_VAR).clone()),
+ credentials_provider,
}
});
@@ -1,5 +1,6 @@
use anyhow::Result;
use convert_case::{Case, Casing};
+use credentials_provider::CredentialsProvider;
use futures::{FutureExt, StreamExt, future::BoxFuture};
use gpui::{AnyView, App, AsyncApp, Context, Entity, SharedString, Task, Window};
use http_client::HttpClient;
@@ -44,6 +45,7 @@ pub struct State {
id: Arc<str>,
api_key_state: ApiKeyState,
settings: OpenAiCompatibleSettings,
+ credentials_provider: Arc<dyn CredentialsProvider>,
}
impl State {
@@ -52,20 +54,36 @@ impl State {
}
fn set_api_key(&mut self, api_key: Option<String>, cx: &mut Context<Self>) -> Task<Result<()>> {
+ let credentials_provider = self.credentials_provider.clone();
let api_url = SharedString::new(self.settings.api_url.as_str());
- self.api_key_state
- .store(api_url, api_key, |this| &mut this.api_key_state, cx)
+ self.api_key_state.store(
+ api_url,
+ api_key,
+ |this| &mut this.api_key_state,
+ credentials_provider,
+ cx,
+ )
}
fn authenticate(&mut self, cx: &mut Context<Self>) -> Task<Result<(), AuthenticateError>> {
+ let credentials_provider = self.credentials_provider.clone();
let api_url = SharedString::new(self.settings.api_url.clone());
- self.api_key_state
- .load_if_needed(api_url, |this| &mut this.api_key_state, cx)
+ self.api_key_state.load_if_needed(
+ api_url,
+ |this| &mut this.api_key_state,
+ credentials_provider,
+ cx,
+ )
}
}
impl OpenAiCompatibleLanguageModelProvider {
- pub fn new(id: Arc<str>, http_client: Arc<dyn HttpClient>, cx: &mut App) -> Self {
+ pub fn new(
+ id: Arc<str>,
+ http_client: Arc<dyn HttpClient>,
+ credentials_provider: Arc<dyn CredentialsProvider>,
+ cx: &mut App,
+ ) -> Self {
fn resolve_settings<'a>(id: &'a str, cx: &'a App) -> Option<&'a OpenAiCompatibleSettings> {
crate::AllLanguageModelSettings::get_global(cx)
.openai_compatible
@@ -79,10 +97,12 @@ impl OpenAiCompatibleLanguageModelProvider {
return;
};
if &this.settings != &settings {
+ let credentials_provider = this.credentials_provider.clone();
let api_url = SharedString::new(settings.api_url.as_str());
this.api_key_state.handle_url_change(
api_url,
|this| &mut this.api_key_state,
+ credentials_provider,
cx,
);
this.settings = settings;
@@ -98,6 +118,7 @@ impl OpenAiCompatibleLanguageModelProvider {
EnvVar::new(api_key_env_var_name),
),
settings,
+ credentials_provider,
}
});
@@ -381,7 +402,7 @@ impl LanguageModel for OpenAiCompatibleLanguageModel {
self.model.capabilities.parallel_tool_calls,
self.model.capabilities.prompt_cache_key,
self.max_output_tokens(),
- None,
+ self.model.reasoning_effort.clone(),
);
let completions = self.stream_completion(request, cx);
async move {
@@ -396,7 +417,7 @@ impl LanguageModel for OpenAiCompatibleLanguageModel {
self.model.capabilities.parallel_tool_calls,
self.model.capabilities.prompt_cache_key,
self.max_output_tokens(),
- None,
+ self.model.reasoning_effort.clone(),
);
let completions = self.stream_response(request, cx);
async move {
@@ -1,5 +1,6 @@
use anyhow::Result;
use collections::HashMap;
+use credentials_provider::CredentialsProvider;
use futures::{FutureExt, Stream, StreamExt, future::BoxFuture};
use gpui::{AnyView, App, AsyncApp, Context, Entity, SharedString, Task};
use http_client::HttpClient;
@@ -42,6 +43,7 @@ pub struct OpenRouterLanguageModelProvider {
pub struct State {
api_key_state: ApiKeyState,
+ credentials_provider: Arc<dyn CredentialsProvider>,
http_client: Arc<dyn HttpClient>,
available_models: Vec<open_router::Model>,
fetch_models_task: Option<Task<Result<(), LanguageModelCompletionError>>>,
@@ -53,16 +55,26 @@ impl State {
}
fn set_api_key(&mut self, api_key: Option<String>, cx: &mut Context<Self>) -> Task<Result<()>> {
+ let credentials_provider = self.credentials_provider.clone();
let api_url = OpenRouterLanguageModelProvider::api_url(cx);
- self.api_key_state
- .store(api_url, api_key, |this| &mut this.api_key_state, cx)
+ self.api_key_state.store(
+ api_url,
+ api_key,
+ |this| &mut this.api_key_state,
+ credentials_provider,
+ cx,
+ )
}
fn authenticate(&mut self, cx: &mut Context<Self>) -> Task<Result<(), AuthenticateError>> {
+ let credentials_provider = self.credentials_provider.clone();
let api_url = OpenRouterLanguageModelProvider::api_url(cx);
- let task = self
- .api_key_state
- .load_if_needed(api_url, |this| &mut this.api_key_state, cx);
+ let task = self.api_key_state.load_if_needed(
+ api_url,
+ |this| &mut this.api_key_state,
+ credentials_provider,
+ cx,
+ );
cx.spawn(async move |this, cx| {
let result = task.await;
@@ -114,7 +126,11 @@ impl State {
}
impl OpenRouterLanguageModelProvider {
- pub fn new(http_client: Arc<dyn HttpClient>, cx: &mut App) -> Self {
+ pub fn new(
+ http_client: Arc<dyn HttpClient>,
+ credentials_provider: Arc<dyn CredentialsProvider>,
+ cx: &mut App,
+ ) -> Self {
let state = cx.new(|cx| {
cx.observe_global::<SettingsStore>({
let mut last_settings = OpenRouterLanguageModelProvider::settings(cx).clone();
@@ -131,6 +147,7 @@ impl OpenRouterLanguageModelProvider {
.detach();
State {
api_key_state: ApiKeyState::new(Self::api_url(cx), (*API_KEY_ENV_VAR).clone()),
+ credentials_provider,
http_client: http_client.clone(),
available_models: Vec::new(),
fetch_models_task: None,
@@ -1,5 +1,6 @@
use anyhow::Result;
use collections::BTreeMap;
+use credentials_provider::CredentialsProvider;
use futures::{FutureExt, StreamExt, future::BoxFuture};
use gpui::{AnyView, App, AsyncApp, Context, Entity, SharedString, Task, Window};
use http_client::HttpClient;
@@ -43,6 +44,7 @@ pub struct OpenCodeLanguageModelProvider {
pub struct State {
api_key_state: ApiKeyState,
+ credentials_provider: Arc<dyn CredentialsProvider>,
}
impl State {
@@ -51,30 +53,51 @@ impl State {
}
fn set_api_key(&mut self, api_key: Option<String>, cx: &mut Context<Self>) -> Task<Result<()>> {
+ let credentials_provider = self.credentials_provider.clone();
let api_url = OpenCodeLanguageModelProvider::api_url(cx);
- self.api_key_state
- .store(api_url, api_key, |this| &mut this.api_key_state, cx)
+ self.api_key_state.store(
+ api_url,
+ api_key,
+ |this| &mut this.api_key_state,
+ credentials_provider,
+ cx,
+ )
}
fn authenticate(&mut self, cx: &mut Context<Self>) -> Task<Result<(), AuthenticateError>> {
+ let credentials_provider = self.credentials_provider.clone();
let api_url = OpenCodeLanguageModelProvider::api_url(cx);
- self.api_key_state
- .load_if_needed(api_url, |this| &mut this.api_key_state, cx)
+ self.api_key_state.load_if_needed(
+ api_url,
+ |this| &mut this.api_key_state,
+ credentials_provider,
+ cx,
+ )
}
}
impl OpenCodeLanguageModelProvider {
- pub fn new(http_client: Arc<dyn HttpClient>, cx: &mut App) -> Self {
+ pub fn new(
+ http_client: Arc<dyn HttpClient>,
+ credentials_provider: Arc<dyn CredentialsProvider>,
+ cx: &mut App,
+ ) -> Self {
let state = cx.new(|cx| {
cx.observe_global::<SettingsStore>(|this: &mut State, cx| {
+ let credentials_provider = this.credentials_provider.clone();
let api_url = Self::api_url(cx);
- this.api_key_state
- .handle_url_change(api_url, |this| &mut this.api_key_state, cx);
+ this.api_key_state.handle_url_change(
+ api_url,
+ |this| &mut this.api_key_state,
+ credentials_provider,
+ cx,
+ );
cx.notify();
})
.detach();
State {
api_key_state: ApiKeyState::new(Self::api_url(cx), (*API_KEY_ENV_VAR).clone()),
+ credentials_provider,
}
});
@@ -1,5 +1,6 @@
use anyhow::Result;
use collections::BTreeMap;
+use credentials_provider::CredentialsProvider;
use futures::{FutureExt, StreamExt, future::BoxFuture};
use gpui::{AnyView, App, AsyncApp, Context, Entity, SharedString, Task, Window};
use http_client::HttpClient;
@@ -38,6 +39,7 @@ pub struct VercelLanguageModelProvider {
pub struct State {
api_key_state: ApiKeyState,
+ credentials_provider: Arc<dyn CredentialsProvider>,
}
impl State {
@@ -46,30 +48,51 @@ impl State {
}
fn set_api_key(&mut self, api_key: Option<String>, cx: &mut Context<Self>) -> Task<Result<()>> {
+ let credentials_provider = self.credentials_provider.clone();
let api_url = VercelLanguageModelProvider::api_url(cx);
- self.api_key_state
- .store(api_url, api_key, |this| &mut this.api_key_state, cx)
+ self.api_key_state.store(
+ api_url,
+ api_key,
+ |this| &mut this.api_key_state,
+ credentials_provider,
+ cx,
+ )
}
fn authenticate(&mut self, cx: &mut Context<Self>) -> Task<Result<(), AuthenticateError>> {
+ let credentials_provider = self.credentials_provider.clone();
let api_url = VercelLanguageModelProvider::api_url(cx);
- self.api_key_state
- .load_if_needed(api_url, |this| &mut this.api_key_state, cx)
+ self.api_key_state.load_if_needed(
+ api_url,
+ |this| &mut this.api_key_state,
+ credentials_provider,
+ cx,
+ )
}
}
impl VercelLanguageModelProvider {
- pub fn new(http_client: Arc<dyn HttpClient>, cx: &mut App) -> Self {
+ pub fn new(
+ http_client: Arc<dyn HttpClient>,
+ credentials_provider: Arc<dyn CredentialsProvider>,
+ cx: &mut App,
+ ) -> Self {
let state = cx.new(|cx| {
cx.observe_global::<SettingsStore>(|this: &mut State, cx| {
+ let credentials_provider = this.credentials_provider.clone();
let api_url = Self::api_url(cx);
- this.api_key_state
- .handle_url_change(api_url, |this| &mut this.api_key_state, cx);
+ this.api_key_state.handle_url_change(
+ api_url,
+ |this| &mut this.api_key_state,
+ credentials_provider,
+ cx,
+ );
cx.notify();
})
.detach();
State {
api_key_state: ApiKeyState::new(Self::api_url(cx), (*API_KEY_ENV_VAR).clone()),
+ credentials_provider,
}
});
@@ -1,5 +1,6 @@
use anyhow::Result;
use collections::BTreeMap;
+use credentials_provider::CredentialsProvider;
use futures::{AsyncReadExt, FutureExt, StreamExt, future::BoxFuture};
use gpui::{AnyView, App, AsyncApp, Context, Entity, SharedString, Task, Window};
use http_client::{AsyncBody, HttpClient, Method, Request as HttpRequest, http};
@@ -41,6 +42,7 @@ pub struct VercelAiGatewayLanguageModelProvider {
pub struct State {
api_key_state: ApiKeyState,
+ credentials_provider: Arc<dyn CredentialsProvider>,
http_client: Arc<dyn HttpClient>,
available_models: Vec<AvailableModel>,
fetch_models_task: Option<Task<Result<(), LanguageModelCompletionError>>>,
@@ -52,16 +54,26 @@ impl State {
}
fn set_api_key(&mut self, api_key: Option<String>, cx: &mut Context<Self>) -> Task<Result<()>> {
+ let credentials_provider = self.credentials_provider.clone();
let api_url = VercelAiGatewayLanguageModelProvider::api_url(cx);
- self.api_key_state
- .store(api_url, api_key, |this| &mut this.api_key_state, cx)
+ self.api_key_state.store(
+ api_url,
+ api_key,
+ |this| &mut this.api_key_state,
+ credentials_provider,
+ cx,
+ )
}
fn authenticate(&mut self, cx: &mut Context<Self>) -> Task<Result<(), AuthenticateError>> {
+ let credentials_provider = self.credentials_provider.clone();
let api_url = VercelAiGatewayLanguageModelProvider::api_url(cx);
- let task = self
- .api_key_state
- .load_if_needed(api_url, |this| &mut this.api_key_state, cx);
+ let task = self.api_key_state.load_if_needed(
+ api_url,
+ |this| &mut this.api_key_state,
+ credentials_provider,
+ cx,
+ );
cx.spawn(async move |this, cx| {
let result = task.await;
@@ -100,7 +112,11 @@ impl State {
}
impl VercelAiGatewayLanguageModelProvider {
- pub fn new(http_client: Arc<dyn HttpClient>, cx: &mut App) -> Self {
+ pub fn new(
+ http_client: Arc<dyn HttpClient>,
+ credentials_provider: Arc<dyn CredentialsProvider>,
+ cx: &mut App,
+ ) -> Self {
let state = cx.new(|cx| {
cx.observe_global::<SettingsStore>({
let mut last_settings = VercelAiGatewayLanguageModelProvider::settings(cx).clone();
@@ -116,6 +132,7 @@ impl VercelAiGatewayLanguageModelProvider {
.detach();
State {
api_key_state: ApiKeyState::new(Self::api_url(cx), (*API_KEY_ENV_VAR).clone()),
+ credentials_provider,
http_client: http_client.clone(),
available_models: Vec::new(),
fetch_models_task: None,
@@ -1,5 +1,6 @@
use anyhow::Result;
use collections::BTreeMap;
+use credentials_provider::CredentialsProvider;
use futures::{FutureExt, StreamExt, future::BoxFuture};
use gpui::{AnyView, App, AsyncApp, Context, Entity, Task, Window};
use http_client::HttpClient;
@@ -39,6 +40,7 @@ pub struct XAiLanguageModelProvider {
pub struct State {
api_key_state: ApiKeyState,
+ credentials_provider: Arc<dyn CredentialsProvider>,
}
impl State {
@@ -47,30 +49,51 @@ impl State {
}
fn set_api_key(&mut self, api_key: Option<String>, cx: &mut Context<Self>) -> Task<Result<()>> {
+ let credentials_provider = self.credentials_provider.clone();
let api_url = XAiLanguageModelProvider::api_url(cx);
- self.api_key_state
- .store(api_url, api_key, |this| &mut this.api_key_state, cx)
+ self.api_key_state.store(
+ api_url,
+ api_key,
+ |this| &mut this.api_key_state,
+ credentials_provider,
+ cx,
+ )
}
fn authenticate(&mut self, cx: &mut Context<Self>) -> Task<Result<(), AuthenticateError>> {
+ let credentials_provider = self.credentials_provider.clone();
let api_url = XAiLanguageModelProvider::api_url(cx);
- self.api_key_state
- .load_if_needed(api_url, |this| &mut this.api_key_state, cx)
+ self.api_key_state.load_if_needed(
+ api_url,
+ |this| &mut this.api_key_state,
+ credentials_provider,
+ cx,
+ )
}
}
impl XAiLanguageModelProvider {
- pub fn new(http_client: Arc<dyn HttpClient>, cx: &mut App) -> Self {
+ pub fn new(
+ http_client: Arc<dyn HttpClient>,
+ credentials_provider: Arc<dyn CredentialsProvider>,
+ cx: &mut App,
+ ) -> Self {
let state = cx.new(|cx| {
cx.observe_global::<SettingsStore>(|this: &mut State, cx| {
+ let credentials_provider = this.credentials_provider.clone();
let api_url = Self::api_url(cx);
- this.api_key_state
- .handle_url_change(api_url, |this| &mut this.api_key_state, cx);
+ this.api_key_state.handle_url_change(
+ api_url,
+ |this| &mut this.api_key_state,
+ credentials_provider,
+ cx,
+ );
cx.notify();
})
.detach();
State {
api_key_state: ApiKeyState::new(Self::api_url(cx), (*API_KEY_ENV_VAR).clone()),
+ credentials_provider,
}
});
@@ -29,7 +29,7 @@ impl ActiveBufferLanguage {
self.active_language = Some(None);
let editor = editor.read(cx);
- if let Some((_, buffer, _)) = editor.active_excerpt(cx)
+ if let Some(buffer) = editor.active_buffer(cx)
&& let Some(language) = buffer.read(cx).language()
{
self.active_language = Some(Some(language.name()));
@@ -51,11 +51,11 @@ impl LanguageSelector {
cx: &mut Context<Workspace>,
) -> Option<()> {
let registry = workspace.app_state().languages.clone();
- let (_, buffer, _) = workspace
+ let buffer = workspace
.active_item(cx)?
.act_as::<Editor>(cx)?
.read(cx)
- .active_excerpt(cx)?;
+ .active_buffer(cx)?;
let project = workspace.project().clone();
workspace.toggle_modal(window, cx, move |window, cx| {
@@ -414,10 +414,10 @@ mod tests {
) -> Entity<Editor> {
let editor = open_new_buffer_editor(workspace, project, cx).await;
// Ensure the buffer has no language after the editor is created
- let (_, buffer, _) = editor.read_with(cx, |editor, cx| {
+ let buffer = editor.read_with(cx, |editor, cx| {
editor
- .active_excerpt(cx)
- .expect("editor should have an active excerpt")
+ .active_buffer(cx)
+ .expect("editor should have an active buffer")
});
buffer.update(cx, |buffer, cx| {
buffer.set_language(None, cx);
@@ -454,8 +454,8 @@ mod tests {
.await
.expect("language should exist in registry");
editor.update(cx, move |editor, cx| {
- let (_, buffer, _) = editor
- .active_excerpt(cx)
+ let buffer = editor
+ .active_buffer(cx)
.expect("editor should have an active excerpt");
buffer.update(cx, |buffer, cx| {
buffer.set_language(Some(language), cx);
@@ -578,6 +578,15 @@ mod tests {
assert_selected_language_for_editor(&workspace, &rust_editor, Some("Rust"), cx);
assert_selected_language_for_editor(&workspace, &typescript_editor, Some("TypeScript"), cx);
+ // Ensure the empty editor's buffer has no language before asserting
+ let buffer = empty_editor.read_with(cx, |editor, cx| {
+ editor
+ .active_buffer(cx)
+ .expect("editor should have an active buffer")
+ });
+ buffer.update(cx, |buffer, cx| {
+ buffer.set_language(None, cx);
+ });
assert_selected_language_for_editor(&workspace, &empty_editor, None, cx);
}
@@ -1,5 +1,5 @@
use editor::{
- Anchor, Editor, ExcerptId, HighlightKey, MultiBufferSnapshot, SelectionEffects, ToPoint,
+ Anchor, Editor, HighlightKey, MultiBufferSnapshot, SelectionEffects, ToPoint,
scroll::Autoscroll,
};
use gpui::{
@@ -8,8 +8,7 @@ use gpui::{
MouseDownEvent, MouseMoveEvent, ParentElement, Render, ScrollStrategy, SharedString, Styled,
Task, UniformListScrollHandle, WeakEntity, Window, actions, div, rems, uniform_list,
};
-use language::ToOffset;
-
+use language::{BufferId, Point, ToOffset};
use menu::{SelectNext, SelectPrevious};
use std::{mem, ops::Range};
use theme::ActiveTheme;
@@ -114,12 +113,12 @@ impl HighlightCategory {
#[derive(Debug, Clone)]
struct HighlightEntry {
- excerpt_id: ExcerptId,
range: Range<Anchor>,
+ buffer_id: BufferId,
+ buffer_point_range: Range<Point>,
range_display: SharedString,
style: HighlightStyle,
category: HighlightCategory,
- sort_key: (ExcerptId, u32, u32, u32, u32),
}
/// An item in the display list: either a separator between excerpts or a highlight entry.
@@ -319,20 +318,18 @@ impl HighlightsTreeView {
display_map.update(cx, |display_map, cx| {
for (key, text_highlights) in display_map.all_text_highlights() {
for range in &text_highlights.1 {
- let excerpt_id = range.start.excerpt_id;
- let (range_display, sort_key) = format_anchor_range(
- range,
- excerpt_id,
- &multi_buffer_snapshot,
- is_singleton,
- );
+ let Some((range_display, buffer_id, buffer_point_range)) =
+ format_anchor_range(range, &multi_buffer_snapshot)
+ else {
+ continue;
+ };
entries.push(HighlightEntry {
- excerpt_id,
range: range.clone(),
+ buffer_id,
range_display,
style: text_highlights.0,
category: HighlightCategory::Text(*key),
- sort_key,
+ buffer_point_range,
});
}
}
@@ -345,13 +342,11 @@ impl HighlightsTreeView {
.and_then(|buf| buf.read(cx).language().map(|l| l.name()));
for token in tokens.iter() {
let range = token.range.start..token.range.end;
- let excerpt_id = range.start.excerpt_id;
- let (range_display, sort_key) = format_anchor_range(
- &range,
- excerpt_id,
- &multi_buffer_snapshot,
- is_singleton,
- );
+ let Some((range_display, entry_buffer_id, buffer_point_range)) =
+ format_anchor_range(&range, &multi_buffer_snapshot)
+ else {
+ continue;
+ };
let Some(stylizer) = lsp_store.get_or_create_token_stylizer(
token.server_id,
language_name.as_ref(),
@@ -388,8 +383,8 @@ impl HighlightsTreeView {
});
entries.push(HighlightEntry {
- excerpt_id,
range,
+ buffer_id: entry_buffer_id,
range_display,
style: interner[token.style],
category: HighlightCategory::SemanticToken {
@@ -399,7 +394,7 @@ impl HighlightsTreeView {
.map(SharedString::from),
theme_key,
},
- sort_key,
+ buffer_point_range,
});
}
}
@@ -407,7 +402,13 @@ impl HighlightsTreeView {
});
let syntax_theme = cx.theme().syntax().clone();
- for (excerpt_id, buffer_snapshot, excerpt_range) in multi_buffer_snapshot.excerpts() {
+ for excerpt_range in multi_buffer_snapshot.excerpts() {
+ let Some(buffer_snapshot) =
+ multi_buffer_snapshot.buffer_for_id(excerpt_range.context.start.buffer_id)
+ else {
+ continue;
+ };
+
let start_offset = excerpt_range.context.start.to_offset(buffer_snapshot);
let end_offset = excerpt_range.context.end.to_offset(buffer_snapshot);
let range = start_offset..end_offset;
@@ -419,7 +420,10 @@ impl HighlightsTreeView {
let highlight_maps: Vec<_> = grammars.iter().map(|g| g.highlight_map()).collect();
for capture in captures {
- let highlight_id = highlight_maps[capture.grammar_index].get(capture.index);
+ let Some(highlight_id) = highlight_maps[capture.grammar_index].get(capture.index)
+ else {
+ continue;
+ };
let Some(style) = syntax_theme.get(highlight_id).cloned() else {
continue;
};
@@ -438,8 +442,8 @@ impl HighlightsTreeView {
let start_anchor = buffer_snapshot.anchor_before(capture.node.start_byte());
let end_anchor = buffer_snapshot.anchor_after(capture.node.end_byte());
- let start = multi_buffer_snapshot.anchor_in_excerpt(excerpt_id, start_anchor);
- let end = multi_buffer_snapshot.anchor_in_excerpt(excerpt_id, end_anchor);
+ let start = multi_buffer_snapshot.anchor_in_excerpt(start_anchor);
+ let end = multi_buffer_snapshot.anchor_in_excerpt(end_anchor);
let (start, end) = match (start, end) {
(Some(s), Some(e)) => (s, e),
@@ -447,29 +451,38 @@ impl HighlightsTreeView {
};
let range = start..end;
- let (range_display, sort_key) =
- format_anchor_range(&range, excerpt_id, &multi_buffer_snapshot, is_singleton);
+ let Some((range_display, buffer_id, buffer_point_range)) =
+ format_anchor_range(&range, &multi_buffer_snapshot)
+ else {
+ continue;
+ };
entries.push(HighlightEntry {
- excerpt_id,
range,
+ buffer_id,
range_display,
style,
category: HighlightCategory::SyntaxToken {
capture_name,
theme_key,
},
- sort_key,
+ buffer_point_range,
});
}
}
entries.sort_by(|a, b| {
- a.sort_key
- .cmp(&b.sort_key)
+ a.buffer_id
+ .cmp(&b.buffer_id)
+ .then_with(|| a.buffer_point_range.start.cmp(&b.buffer_point_range.start))
+ .then_with(|| a.buffer_point_range.end.cmp(&b.buffer_point_range.end))
.then_with(|| a.category.cmp(&b.category))
});
- entries.dedup_by(|a, b| a.sort_key == b.sort_key && a.category == b.category);
+ entries.dedup_by(|a, b| {
+ a.buffer_id == b.buffer_id
+ && a.buffer_point_range == b.buffer_point_range
+ && a.category == b.category
+ });
self.cached_entries = entries;
self.rebuild_display_items(&multi_buffer_snapshot, cx);
@@ -485,7 +498,7 @@ impl HighlightsTreeView {
fn rebuild_display_items(&mut self, snapshot: &MultiBufferSnapshot, cx: &App) {
self.display_items.clear();
- let mut last_excerpt_id: Option<ExcerptId> = None;
+ let mut last_range_end: Option<Anchor> = None;
for (entry_ix, entry) in self.cached_entries.iter().enumerate() {
if !self.should_show_entry(entry) {
@@ -493,11 +506,14 @@ impl HighlightsTreeView {
}
if !self.is_singleton {
- let excerpt_changed =
- last_excerpt_id.is_none_or(|last_id| last_id != entry.excerpt_id);
+ let excerpt_changed = last_range_end.is_none_or(|anchor| {
+ snapshot
+ .excerpt_containing(anchor..entry.range.start)
+ .is_none()
+ });
if excerpt_changed {
- last_excerpt_id = Some(entry.excerpt_id);
- let label = excerpt_label_for(entry.excerpt_id, snapshot, cx);
+ last_range_end = Some(entry.range.end);
+ let label = excerpt_label_for(entry, snapshot, cx);
self.display_items
.push(DisplayItem::ExcerptSeparator { label });
}
@@ -516,10 +532,6 @@ impl HighlightsTreeView {
}
fn scroll_to_cursor_position(&mut self, cursor: &Anchor, snapshot: &MultiBufferSnapshot) {
- let cursor_point = cursor.to_point(snapshot);
- let cursor_key = (cursor_point.row, cursor_point.column);
- let cursor_excerpt = cursor.excerpt_id;
-
let best = self
.display_items
.iter()
@@ -532,17 +544,18 @@ impl HighlightsTreeView {
_ => None,
})
.filter(|(_, _, entry)| {
- let (excerpt_id, start_row, start_col, end_row, end_col) = entry.sort_key;
- if !self.is_singleton && excerpt_id != cursor_excerpt {
- return false;
- }
- let start = (start_row, start_col);
- let end = (end_row, end_col);
- cursor_key >= start && cursor_key <= end
+ entry.range.start.cmp(&cursor, snapshot).is_le()
+ && cursor.cmp(&entry.range.end, snapshot).is_lt()
})
.min_by_key(|(_, _, entry)| {
- let (_, start_row, start_col, end_row, end_col) = entry.sort_key;
- (end_row - start_row, end_col.saturating_sub(start_col))
+ (
+ entry.buffer_point_range.end.row - entry.buffer_point_range.start.row,
+ entry
+ .buffer_point_range
+ .end
+ .column
+ .saturating_sub(entry.buffer_point_range.start.column),
+ )
})
.map(|(display_ix, entry_ix, _)| (display_ix, entry_ix));
@@ -1076,12 +1089,13 @@ impl ToolbarItemView for HighlightsTreeToolbarItemView {
}
fn excerpt_label_for(
- excerpt_id: ExcerptId,
+ entry: &HighlightEntry,
snapshot: &MultiBufferSnapshot,
cx: &App,
) -> SharedString {
- let buffer = snapshot.buffer_for_excerpt(excerpt_id);
- let path_label = buffer
+ let path_label = snapshot
+ .anchor_to_buffer_anchor(entry.range.start)
+ .and_then(|(anchor, _)| snapshot.buffer_for_id(anchor.buffer_id))
.and_then(|buf| buf.file())
.map(|file| {
let full_path = file.full_path(cx);
@@ -1093,50 +1107,21 @@ fn excerpt_label_for(
fn format_anchor_range(
range: &Range<Anchor>,
- excerpt_id: ExcerptId,
snapshot: &MultiBufferSnapshot,
- is_singleton: bool,
-) -> (SharedString, (ExcerptId, u32, u32, u32, u32)) {
- if is_singleton {
- let start = range.start.to_point(snapshot);
- let end = range.end.to_point(snapshot);
- let display = SharedString::from(format!(
- "[{}:{} - {}:{}]",
- start.row + 1,
- start.column + 1,
- end.row + 1,
- end.column + 1,
- ));
- let sort_key = (excerpt_id, start.row, start.column, end.row, end.column);
- (display, sort_key)
- } else {
- let buffer = snapshot.buffer_for_excerpt(excerpt_id);
- if let Some(buffer) = buffer {
- let start = language::ToPoint::to_point(&range.start.text_anchor, buffer);
- let end = language::ToPoint::to_point(&range.end.text_anchor, buffer);
- let display = SharedString::from(format!(
- "[{}:{} - {}:{}]",
- start.row + 1,
- start.column + 1,
- end.row + 1,
- end.column + 1,
- ));
- let sort_key = (excerpt_id, start.row, start.column, end.row, end.column);
- (display, sort_key)
- } else {
- let start = range.start.to_point(snapshot);
- let end = range.end.to_point(snapshot);
- let display = SharedString::from(format!(
- "[{}:{} - {}:{}]",
- start.row + 1,
- start.column + 1,
- end.row + 1,
- end.column + 1,
- ));
- let sort_key = (excerpt_id, start.row, start.column, end.row, end.column);
- (display, sort_key)
- }
- }
+) -> Option<(SharedString, BufferId, Range<Point>)> {
+ let start = range.start.to_point(snapshot);
+ let end = range.end.to_point(snapshot);
+ let ((start_buffer, start), (_, end)) = snapshot
+ .point_to_buffer_point(start)
+ .zip(snapshot.point_to_buffer_point(end))?;
+ let display = SharedString::from(format!(
+ "[{}:{} - {}:{}]",
+ start.row + 1,
+ start.column + 1,
+ end.row + 1,
+ end.column + 1,
+ ));
+ Some((display, start_buffer.remote_id(), start..end))
}
fn render_style_preview(style: HighlightStyle, selected: bool, cx: &App) -> Div {
@@ -1179,13 +1179,20 @@ impl StatusItemView for LspButton {
.and_then(|active_editor| active_editor.editor.upgrade())
.as_ref()
{
- let editor_buffers =
- HashSet::from_iter(editor.read(cx).buffer().read(cx).excerpt_buffer_ids());
+ let editor_buffers = HashSet::from_iter(
+ editor
+ .read(cx)
+ .buffer()
+ .read(cx)
+ .snapshot(cx)
+ .excerpts()
+ .map(|excerpt| excerpt.context.start.buffer_id),
+ );
let _editor_subscription = cx.subscribe_in(
&editor,
window,
|lsp_button, _, e: &EditorEvent, window, cx| match e {
- EditorEvent::ExcerptsAdded { buffer, .. } => {
+ EditorEvent::BufferRangesUpdated { buffer, .. } => {
let updated = lsp_button.server_state.update(cx, |state, cx| {
if let Some(active_editor) = state.active_editor.as_mut() {
let buffer_id = buffer.read(cx).remote_id();
@@ -1198,9 +1205,7 @@ impl StatusItemView for LspButton {
lsp_button.refresh_lsp_menu(false, window, cx);
}
}
- EditorEvent::ExcerptsRemoved {
- removed_buffer_ids, ..
- } => {
+ EditorEvent::BuffersRemoved { removed_buffer_ids } => {
let removed = lsp_button.server_state.update(cx, |state, _| {
let mut removed = false;
if let Some(active_editor) = state.active_editor.as_mut() {
@@ -880,6 +880,7 @@ impl SearchableItem for LspLogView {
// LSP log is read-only.
replacement: false,
selection: false,
+ select_all: true,
}
}
fn active_match_index(
@@ -1,7 +1,6 @@
use command_palette_hooks::CommandPaletteFilter;
use editor::{
- Anchor, Editor, ExcerptId, HighlightKey, MultiBufferOffset, SelectionEffects,
- scroll::Autoscroll,
+ Anchor, Editor, HighlightKey, MultiBufferOffset, SelectionEffects, scroll::Autoscroll,
};
use gpui::{
App, AppContext as _, Context, Div, Entity, EntityId, EventEmitter, FocusHandle, Focusable,
@@ -125,7 +124,6 @@ impl EditorState {
#[derive(Clone)]
struct BufferState {
buffer: Entity<Buffer>,
- excerpt_id: ExcerptId,
active_layer: Option<OwnedSyntaxLayer>,
}
@@ -253,18 +251,18 @@ impl SyntaxTreeView {
let snapshot = editor_state
.editor
.update(cx, |editor, cx| editor.snapshot(window, cx));
- let (buffer, range, excerpt_id) = editor_state.editor.update(cx, |editor, cx| {
+ let (buffer, range) = editor_state.editor.update(cx, |editor, cx| {
let selection_range = editor
.selections
.last::<MultiBufferOffset>(&editor.display_snapshot(cx))
.range();
let multi_buffer = editor.buffer().read(cx);
- let (buffer, range, excerpt_id) = snapshot
+ let (buffer, range, _) = snapshot
.buffer_snapshot()
- .range_to_buffer_ranges(selection_range.start..=selection_range.end)
+ .range_to_buffer_ranges(selection_range.start..selection_range.end)
.pop()?;
let buffer = multi_buffer.buffer(buffer.remote_id()).unwrap();
- Some((buffer, range, excerpt_id))
+ Some((buffer, range))
})?;
// If the cursor has moved into a different excerpt, retrieve a new syntax layer
@@ -273,16 +271,14 @@ impl SyntaxTreeView {
.active_buffer
.get_or_insert_with(|| BufferState {
buffer: buffer.clone(),
- excerpt_id,
active_layer: None,
});
let mut prev_layer = None;
if did_reparse {
prev_layer = buffer_state.active_layer.take();
}
- if buffer_state.buffer != buffer || buffer_state.excerpt_id != excerpt_id {
+ if buffer_state.buffer != buffer {
buffer_state.buffer = buffer.clone();
- buffer_state.excerpt_id = excerpt_id;
buffer_state.active_layer = None;
}
@@ -360,8 +356,7 @@ impl SyntaxTreeView {
// Build a multibuffer anchor range.
let multibuffer = editor_state.editor.read(cx).buffer();
let multibuffer = multibuffer.read(cx).snapshot(cx);
- let excerpt_id = buffer_state.excerpt_id;
- let range = multibuffer.anchor_range_in_excerpt(excerpt_id, range)?;
+ let range = multibuffer.buffer_anchor_range_to_anchor_range(range)?;
let key = cx.entity_id().as_u64() as usize;
// Update the editor with the anchor range.
@@ -1542,10 +1542,10 @@ mod tests {
"await.as_deref_mut(&mut self) -> IterMut<'_, T>".to_string(),
6..18,
vec![
- (6..18, HighlightId(2)),
- (20..23, HighlightId(1)),
- (33..40, HighlightId(0)),
- (45..46, HighlightId(0))
+ (6..18, HighlightId::new(2)),
+ (20..23, HighlightId::new(1)),
+ (33..40, HighlightId::new(0)),
+ (45..46, HighlightId::new(0))
],
))
);
@@ -1572,12 +1572,12 @@ mod tests {
"pub fn as_deref_mut(&mut self) -> IterMut<'_, T>".to_string(),
7..19,
vec![
- (0..3, HighlightId(1)),
- (4..6, HighlightId(1)),
- (7..19, HighlightId(2)),
- (21..24, HighlightId(1)),
- (34..41, HighlightId(0)),
- (46..47, HighlightId(0))
+ (0..3, HighlightId::new(1)),
+ (4..6, HighlightId::new(1)),
+ (7..19, HighlightId::new(2)),
+ (21..24, HighlightId::new(1)),
+ (34..41, HighlightId::new(0)),
+ (46..47, HighlightId::new(0))
],
))
);
@@ -1598,7 +1598,7 @@ mod tests {
Some(CodeLabel::new(
"inner_value: String".to_string(),
6..11,
- vec![(0..11, HighlightId(3)), (13..19, HighlightId(0))],
+ vec![(0..11, HighlightId::new(3)), (13..19, HighlightId::new(0))],
))
);
@@ -1625,8 +1625,8 @@ mod tests {
vec![
(10..13, HighlightId::TABSTOP_INSERT_ID),
(16..19, HighlightId::TABSTOP_INSERT_ID),
- (0..7, HighlightId(2)),
- (7..8, HighlightId(2)),
+ (0..7, HighlightId::new(2)),
+ (7..8, HighlightId::new(2)),
],
))
);
@@ -1653,8 +1653,8 @@ mod tests {
0..4,
vec![
(5..9, HighlightId::TABSTOP_REPLACE_ID),
- (0..3, HighlightId(2)),
- (3..4, HighlightId(2)),
+ (0..3, HighlightId::new(2)),
+ (3..4, HighlightId::new(2)),
],
))
);
@@ -1682,8 +1682,8 @@ mod tests {
vec![
(7..10, HighlightId::TABSTOP_REPLACE_ID),
(13..16, HighlightId::TABSTOP_INSERT_ID),
- (0..2, HighlightId(1)),
- (3..6, HighlightId(1)),
+ (0..2, HighlightId::new(1)),
+ (3..6, HighlightId::new(1)),
],
))
);
@@ -1711,8 +1711,8 @@ mod tests {
vec![
(4..8, HighlightId::TABSTOP_REPLACE_ID),
(12..16, HighlightId::TABSTOP_REPLACE_ID),
- (0..3, HighlightId(1)),
- (9..11, HighlightId(1)),
+ (0..3, HighlightId::new(1)),
+ (9..11, HighlightId::new(1)),
],
))
);
@@ -18,7 +18,7 @@ impl LineEndingIndicator {
self.line_ending = None;
self.active_editor = None;
- if let Some((_, buffer, _)) = editor.read(cx).active_excerpt(cx) {
+ if let Some(buffer) = editor.read(cx).active_buffer(cx) {
let line_ending = buffer.read(cx).line_ending();
self.line_ending = Some(line_ending);
self.active_editor = Some(editor.downgrade());
@@ -40,7 +40,7 @@ impl LineEndingSelector {
fn toggle(editor: &WeakEntity<Editor>, window: &mut Window, cx: &mut App) {
let Some((workspace, buffer)) = editor
.update(cx, |editor, cx| {
- Some((editor.workspace()?, editor.active_excerpt(cx)?.1))
+ Some((editor.workspace()?, editor.active_buffer(cx)?))
})
.ok()
.flatten()
@@ -154,6 +154,8 @@ impl MarkdownStyle {
base_text_style: text_style.clone(),
syntax: cx.theme().syntax().clone(),
selection_background_color: colors.element_selection_background,
+ rule_color: colors.border,
+ block_quote_border_color: colors.border,
code_block_overflow_x_scroll: true,
heading_level_styles: Some(HeadingLevelStyles {
h1: Some(TextStyleRefinement {
@@ -261,6 +263,8 @@ pub struct Markdown {
copied_code_blocks: HashSet<ElementId>,
code_block_scroll_handles: BTreeMap<usize, ScrollHandle>,
context_menu_selected_text: Option<String>,
+ search_highlights: Vec<Range<usize>>,
+ active_search_highlight: Option<usize>,
}
#[derive(Clone, Copy, Default)]
@@ -314,6 +318,78 @@ actions!(
]
);
+enum EscapeAction {
+ PassThrough,
+ Nbsp(usize),
+ DoubleNewline,
+ PrefixBackslash,
+}
+
+impl EscapeAction {
+ fn output_len(&self) -> usize {
+ match self {
+ Self::PassThrough => 1,
+ Self::Nbsp(count) => count * '\u{00A0}'.len_utf8(),
+ Self::DoubleNewline => 2,
+ Self::PrefixBackslash => 2,
+ }
+ }
+
+ fn write_to(&self, c: char, output: &mut String) {
+ match self {
+ Self::PassThrough => output.push(c),
+ Self::Nbsp(count) => {
+ for _ in 0..*count {
+ output.push('\u{00A0}');
+ }
+ }
+ Self::DoubleNewline => {
+ output.push('\n');
+ output.push('\n');
+ }
+ Self::PrefixBackslash => {
+ // '\\' is a single backslash in Rust, e.g. '|' -> '\|'
+ output.push('\\');
+ output.push(c);
+ }
+ }
+ }
+}
+
+// Valid to operate on raw bytes since multi-byte UTF-8
+// sequences never contain ASCII-range bytes.
+struct MarkdownEscaper {
+ in_leading_whitespace: bool,
+}
+
+impl MarkdownEscaper {
+ const TAB_SIZE: usize = 4;
+
+ fn new() -> Self {
+ Self {
+ in_leading_whitespace: true,
+ }
+ }
+
+ fn next(&mut self, byte: u8) -> EscapeAction {
+ let action = if self.in_leading_whitespace && byte == b'\t' {
+ EscapeAction::Nbsp(Self::TAB_SIZE)
+ } else if self.in_leading_whitespace && byte == b' ' {
+ EscapeAction::Nbsp(1)
+ } else if byte == b'\n' {
+ EscapeAction::DoubleNewline
+ } else if byte.is_ascii_punctuation() {
+ EscapeAction::PrefixBackslash
+ } else {
+ EscapeAction::PassThrough
+ };
+
+ self.in_leading_whitespace =
+ byte == b'\n' || (self.in_leading_whitespace && (byte == b' ' || byte == b'\t'));
+ action
+ }
+}
+
impl Markdown {
pub fn new(
source: SharedString,
@@ -356,6 +432,8 @@ impl Markdown {
copied_code_blocks: HashSet::default(),
code_block_scroll_handles: BTreeMap::default(),
context_menu_selected_text: None,
+ search_highlights: Vec::new(),
+ active_search_highlight: None,
};
this.parse(cx);
this
@@ -467,6 +545,8 @@ impl Markdown {
self.autoscroll_request = None;
self.pending_parse = None;
self.should_reparse = false;
+ self.search_highlights.clear();
+ self.active_search_highlight = None;
// Don't clear parsed_markdown here - keep existing content visible until new parse completes
self.parse(cx);
}
@@ -477,30 +557,21 @@ impl Markdown {
}
pub fn escape(s: &str) -> Cow<'_, str> {
- // Valid to use bytes since multi-byte UTF-8 doesn't use ASCII chars.
- let count = s
- .bytes()
- .filter(|c| *c == b'\n' || c.is_ascii_punctuation())
- .count();
- if count > 0 {
- let mut output = String::with_capacity(s.len() + count);
- let mut is_newline = false;
- for c in s.chars() {
- if is_newline && c == ' ' {
- continue;
- }
- is_newline = c == '\n';
- if c == '\n' {
- output.push('\n')
- } else if c.is_ascii_punctuation() {
- output.push('\\')
- }
- output.push(c)
- }
- output.into()
- } else {
- s.into()
+ let output_len: usize = {
+ let mut escaper = MarkdownEscaper::new();
+ s.bytes().map(|byte| escaper.next(byte).output_len()).sum()
+ };
+
+ if output_len == s.len() {
+ return s.into();
}
+
+ let mut escaper = MarkdownEscaper::new();
+ let mut output = String::with_capacity(output_len);
+ for c in s.chars() {
+ escaper.next(c as u8).write_to(c, &mut output);
+ }
+ output.into()
}
pub fn selected_text(&self) -> Option<String> {
@@ -511,6 +582,40 @@ impl Markdown {
}
}
+ pub fn set_search_highlights(
+ &mut self,
+ highlights: Vec<Range<usize>>,
+ active: Option<usize>,
+ cx: &mut Context<Self>,
+ ) {
+ self.search_highlights = highlights;
+ self.active_search_highlight = active;
+ cx.notify();
+ }
+
+ pub fn clear_search_highlights(&mut self, cx: &mut Context<Self>) {
+ if !self.search_highlights.is_empty() || self.active_search_highlight.is_some() {
+ self.search_highlights.clear();
+ self.active_search_highlight = None;
+ cx.notify();
+ }
+ }
+
+ pub fn set_active_search_highlight(&mut self, active: Option<usize>, cx: &mut Context<Self>) {
+ if self.active_search_highlight != active {
+ self.active_search_highlight = active;
+ cx.notify();
+ }
+ }
+
+ pub fn search_highlights(&self) -> &[Range<usize>] {
+ &self.search_highlights
+ }
+
+ pub fn active_search_highlight(&self) -> Option<usize> {
+ self.active_search_highlight
+ }
+
fn copy(&self, text: &RenderedText, _: &mut Window, cx: &mut Context<Self>) {
if self.selection.end <= self.selection.start {
return;
@@ -1019,18 +1124,18 @@ impl MarkdownElement {
builder.pop_div();
}
- fn paint_selection(
- &self,
+ fn paint_highlight_range(
bounds: Bounds<Pixels>,
+ start: usize,
+ end: usize,
+ color: Hsla,
rendered_text: &RenderedText,
window: &mut Window,
- cx: &mut App,
) {
- let selection = self.markdown.read(cx).selection.clone();
- let selection_start = rendered_text.position_for_source_index(selection.start);
- let selection_end = rendered_text.position_for_source_index(selection.end);
+ let start_pos = rendered_text.position_for_source_index(start);
+ let end_pos = rendered_text.position_for_source_index(end);
if let Some(((start_position, start_line_height), (end_position, end_line_height))) =
- selection_start.zip(selection_end)
+ start_pos.zip(end_pos)
{
if start_position.y == end_position.y {
window.paint_quad(quad(
@@ -1039,7 +1144,7 @@ impl MarkdownElement {
point(end_position.x, end_position.y + end_line_height),
),
Pixels::ZERO,
- self.style.selection_background_color,
+ color,
Edges::default(),
Hsla::transparent_black(),
BorderStyle::default(),
@@ -1051,7 +1156,7 @@ impl MarkdownElement {
point(bounds.right(), start_position.y + start_line_height),
),
Pixels::ZERO,
- self.style.selection_background_color,
+ color,
Edges::default(),
Hsla::transparent_black(),
BorderStyle::default(),
@@ -1064,7 +1169,7 @@ impl MarkdownElement {
point(bounds.right(), end_position.y),
),
Pixels::ZERO,
- self.style.selection_background_color,
+ color,
Edges::default(),
Hsla::transparent_black(),
BorderStyle::default(),
@@ -1077,7 +1182,7 @@ impl MarkdownElement {
point(end_position.x, end_position.y + end_line_height),
),
Pixels::ZERO,
- self.style.selection_background_color,
+ color,
Edges::default(),
Hsla::transparent_black(),
BorderStyle::default(),
@@ -1086,6 +1191,52 @@ impl MarkdownElement {
}
}
+ fn paint_selection(
+ &self,
+ bounds: Bounds<Pixels>,
+ rendered_text: &RenderedText,
+ window: &mut Window,
+ cx: &mut App,
+ ) {
+ let selection = self.markdown.read(cx).selection.clone();
+ Self::paint_highlight_range(
+ bounds,
+ selection.start,
+ selection.end,
+ self.style.selection_background_color,
+ rendered_text,
+ window,
+ );
+ }
+
+ fn paint_search_highlights(
+ &self,
+ bounds: Bounds<Pixels>,
+ rendered_text: &RenderedText,
+ window: &mut Window,
+ cx: &mut App,
+ ) {
+ let markdown = self.markdown.read(cx);
+ let active_index = markdown.active_search_highlight;
+ let colors = cx.theme().colors();
+
+ for (i, highlight_range) in markdown.search_highlights.iter().enumerate() {
+ let color = if Some(i) == active_index {
+ colors.search_active_match_background
+ } else {
+ colors.search_match_background
+ };
+ Self::paint_highlight_range(
+ bounds,
+ highlight_range.start,
+ highlight_range.end,
+ color,
+ rendered_text,
+ window,
+ );
+ }
+ }
+
fn paint_mouse_listeners(
&mut self,
hitbox: &Hitbox,
@@ -1890,6 +2041,7 @@ impl Element for MarkdownElement {
self.paint_mouse_listeners(hitbox, &rendered_markdown.text, window, cx);
rendered_markdown.element.paint(window, cx);
+ self.paint_search_highlights(bounds, &rendered_markdown.text, window, cx);
self.paint_selection(bounds, &rendered_markdown.text, window, cx);
}
}
@@ -3077,15 +3229,120 @@ mod tests {
);
}
+ fn nbsp(n: usize) -> String {
+ "\u{00A0}".repeat(n)
+ }
+
+ #[test]
+ fn test_escape_plain_text() {
+ assert_eq!(Markdown::escape("hello world"), "hello world");
+ assert_eq!(Markdown::escape(""), "");
+ assert_eq!(Markdown::escape("café ☕ naïve"), "café ☕ naïve");
+ }
+
+ #[test]
+ fn test_escape_punctuation() {
+ assert_eq!(Markdown::escape("hello `world`"), r"hello \`world\`");
+ assert_eq!(Markdown::escape("a|b"), r"a\|b");
+ }
+
+ #[test]
+ fn test_escape_leading_spaces() {
+ assert_eq!(Markdown::escape("    hello"), [&nbsp(4), "hello"].concat());
+ assert_eq!(
+ Markdown::escape("    | { a: string }"),
+ [&nbsp(4), r"\| \{ a\: string \}"].concat()
+ );
+ assert_eq!(
+ Markdown::escape("  first\n  second"),
+ [&nbsp(2), "first\n\n", &nbsp(2), "second"].concat()
+ );
+ assert_eq!(Markdown::escape("hello world"), "hello world");
+ }
+
+ #[test]
+ fn test_escape_leading_tabs() {
+ assert_eq!(Markdown::escape("\thello"), [&nbsp(4), "hello"].concat());
+ assert_eq!(
+ Markdown::escape("hello\n\t\tindented"),
+ ["hello\n\n", &nbsp(8), "indented"].concat()
+ );
+ assert_eq!(
+ Markdown::escape(" \t hello"),
+ [&nbsp(1 + 4 + 1), "hello"].concat()
+ );
+ assert_eq!(Markdown::escape("hello\tworld"), "hello\tworld");
+ }
+
#[test]
- fn test_escape() {
- assert_eq!(Markdown::escape("hello `world`"), "hello \\`world\\`");
+ fn test_escape_newlines() {
+ assert_eq!(Markdown::escape("a\nb"), "a\n\nb");
+ assert_eq!(Markdown::escape("a\n\nb"), "a\n\n\n\nb");
+ assert_eq!(Markdown::escape("\nhello"), "\n\nhello");
+ }
+
+ #[test]
+ fn test_escape_multiline_diagnostic() {
assert_eq!(
- Markdown::escape("hello\n cool world"),
- "hello\n\ncool world"
+ Markdown::escape("    | { a: string }\n    | { b: number }"),
+ [
+ &nbsp(4),
+ r"\| \{ a\: string \}",
+ "\n\n",
+ &nbsp(4),
+ r"\| \{ b\: number \}",
+ ]
+ .concat()
);
}
+ fn has_code_block(markdown: &str) -> bool {
+ let parsed_data = parse_markdown_with_options(markdown, false);
+ parsed_data
+ .events
+ .iter()
+ .any(|(_, event)| matches!(event, MarkdownEvent::Start(MarkdownTag::CodeBlock { .. })))
+ }
+
+ #[test]
+ fn test_escape_output_len_matches_precomputed() {
+ let cases = [
+ "",
+ "hello world",
+ "hello `world`",
+ " hello",
+ " | { a: string }",
+ "\thello",
+ "hello\n\t\tindented",
+ " \t hello",
+ "hello\tworld",
+ "a\nb",
+ "a\n\nb",
+ "\nhello",
+ " | { a: string }\n | { b: number }",
+ "café ☕ naïve",
+ ];
+ for input in cases {
+ let mut escaper = MarkdownEscaper::new();
+ let precomputed: usize = input.bytes().map(|b| escaper.next(b).output_len()).sum();
+
+ let mut escaper = MarkdownEscaper::new();
+ let mut output = String::new();
+ for c in input.chars() {
+ escaper.next(c as u8).write_to(c, &mut output);
+ }
+
+ assert_eq!(precomputed, output.len(), "length mismatch for {:?}", input);
+ }
+ }
+
+ #[test]
+ fn test_escape_prevents_code_block() {
+ let diagnostic = " | { a: string }";
+ assert!(has_code_block(diagnostic));
+ assert!(!has_code_block(&Markdown::escape(diagnostic)));
+ }
+
#[track_caller]
fn assert_mappings(rendered: &RenderedText, expected: Vec<Vec<(usize, usize)>>) {
assert_eq!(rendered.lines.len(), expected.len(), "line count mismatch");
@@ -21,6 +21,7 @@ gpui.workspace = true
language.workspace = true
log.workspace = true
markdown.workspace = true
+project.workspace = true
settings.workspace = true
theme_settings.workspace = true
ui.workspace = true
@@ -1,4 +1,5 @@
use std::cmp::min;
+use std::ops::Range;
use std::path::{Path, PathBuf};
use std::sync::Arc;
use std::time::Duration;
@@ -16,11 +17,15 @@ use markdown::{
CodeBlockRenderer, CopyButtonVisibility, Markdown, MarkdownElement, MarkdownFont,
MarkdownOptions, MarkdownStyle,
};
+use project::search::SearchQuery;
use settings::Settings;
use theme_settings::ThemeSettings;
use ui::{WithScrollbar, prelude::*};
use util::normalize_path;
-use workspace::item::{Item, ItemHandle};
+use workspace::item::{Item, ItemBufferKind, ItemHandle};
+use workspace::searchable::{
+ Direction, SearchEvent, SearchOptions, SearchToken, SearchableItem, SearchableItemHandle,
+};
use workspace::{OpenOptions, OpenVisible, Pane, Workspace};
use crate::{
@@ -295,7 +300,7 @@ impl MarkdownPreviewView {
EditorEvent::Edited { .. }
| EditorEvent::BufferEdited { .. }
| EditorEvent::DirtyChanged
- | EditorEvent::ExcerptsEdited { .. } => {
+ | EditorEvent::BuffersEdited { .. } => {
this.update_markdown_from_active_editor(true, false, window, cx);
}
EditorEvent::SelectionsChanged { .. } => {
@@ -382,6 +387,7 @@ impl MarkdownPreviewView {
markdown.reset(contents, cx);
});
view.sync_preview_to_source_index(selection_start, should_reveal_selection, cx);
+ cx.emit(SearchEvent::MatchesInvalidated);
}
view.pending_update_task = None;
cx.notify();
@@ -751,6 +757,7 @@ impl Focusable for MarkdownPreviewView {
}
impl EventEmitter<()> for MarkdownPreviewView {}
+impl EventEmitter<SearchEvent> for MarkdownPreviewView {}
impl Item for MarkdownPreviewView {
type Event = ();
@@ -775,6 +782,18 @@ impl Item for MarkdownPreviewView {
}
fn to_item_events(_event: &Self::Event, _f: &mut dyn FnMut(workspace::item::ItemEvent)) {}
+
+ fn buffer_kind(&self, _cx: &App) -> ItemBufferKind {
+ ItemBufferKind::Singleton
+ }
+
+ fn as_searchable(
+ &self,
+ handle: &Entity<Self>,
+ _: &App,
+ ) -> Option<Box<dyn SearchableItemHandle>> {
+ Some(Box::new(handle.clone()))
+ }
}
impl Render for MarkdownPreviewView {
@@ -807,6 +826,140 @@ impl Render for MarkdownPreviewView {
}
}
+impl SearchableItem for MarkdownPreviewView {
+ type Match = Range<usize>;
+
+ fn supported_options(&self) -> SearchOptions {
+ SearchOptions {
+ case: true,
+ word: true,
+ regex: true,
+ replacement: false,
+ selection: false,
+ select_all: false,
+ find_in_results: false,
+ }
+ }
+
+ fn get_matches(&self, _window: &mut Window, cx: &mut App) -> (Vec<Self::Match>, SearchToken) {
+ (
+ self.markdown.read(cx).search_highlights().to_vec(),
+ SearchToken::default(),
+ )
+ }
+
+ fn clear_matches(&mut self, _window: &mut Window, cx: &mut Context<Self>) {
+ let had_highlights = !self.markdown.read(cx).search_highlights().is_empty();
+ self.markdown.update(cx, |markdown, cx| {
+ markdown.clear_search_highlights(cx);
+ });
+ if had_highlights {
+ cx.emit(SearchEvent::MatchesInvalidated);
+ }
+ }
+
+ fn update_matches(
+ &mut self,
+ matches: &[Self::Match],
+ active_match_index: Option<usize>,
+ _token: SearchToken,
+ _window: &mut Window,
+ cx: &mut Context<Self>,
+ ) {
+ let old_highlights = self.markdown.read(cx).search_highlights();
+ let changed = old_highlights != matches;
+ self.markdown.update(cx, |markdown, cx| {
+ markdown.set_search_highlights(matches.to_vec(), active_match_index, cx);
+ });
+ if changed {
+ cx.emit(SearchEvent::MatchesInvalidated);
+ }
+ }
+
+ fn query_suggestion(&mut self, _window: &mut Window, cx: &mut Context<Self>) -> String {
+ self.markdown.read(cx).selected_text().unwrap_or_default()
+ }
+
+ fn activate_match(
+ &mut self,
+ index: usize,
+ matches: &[Self::Match],
+ _token: SearchToken,
+ _window: &mut Window,
+ cx: &mut Context<Self>,
+ ) {
+ if let Some(match_range) = matches.get(index) {
+ let start = match_range.start;
+ self.markdown.update(cx, |markdown, cx| {
+ markdown.set_active_search_highlight(Some(index), cx);
+ markdown.request_autoscroll_to_source_index(start, cx);
+ });
+ cx.emit(SearchEvent::ActiveMatchChanged);
+ }
+ }
+
+ fn select_matches(
+ &mut self,
+ _matches: &[Self::Match],
+ _token: SearchToken,
+ _window: &mut Window,
+ _cx: &mut Context<Self>,
+ ) {
+ }
+
+ fn replace(
+ &mut self,
+ _: &Self::Match,
+ _: &SearchQuery,
+ _token: SearchToken,
+ _window: &mut Window,
+ _: &mut Context<Self>,
+ ) {
+ }
+
+ fn find_matches(
+ &mut self,
+ query: Arc<SearchQuery>,
+ _window: &mut Window,
+ cx: &mut Context<Self>,
+ ) -> Task<Vec<Self::Match>> {
+ let source = self.markdown.read(cx).source().to_string();
+ cx.background_spawn(async move { query.search_str(&source) })
+ }
+
+ fn active_match_index(
+ &mut self,
+ direction: Direction,
+ matches: &[Self::Match],
+ _token: SearchToken,
+ _window: &mut Window,
+ cx: &mut Context<Self>,
+ ) -> Option<usize> {
+ if matches.is_empty() {
+ return None;
+ }
+
+ let markdown = self.markdown.read(cx);
+ let current_source_index = markdown
+ .active_search_highlight()
+ .and_then(|i| markdown.search_highlights().get(i))
+ .map(|m| m.start)
+ .or(self.active_source_index)
+ .unwrap_or(0);
+
+ match direction {
+ Direction::Next => matches
+ .iter()
+ .position(|m| m.start >= current_source_index)
+ .or(Some(0)),
+ Direction::Prev => matches
+ .iter()
+ .rposition(|m| m.start <= current_source_index)
+ .or(Some(matches.len().saturating_sub(1))),
+ }
+ }
+}
+
#[cfg(test)]
mod tests {
use crate::markdown_preview_view::ImageSource;
@@ -316,3 +316,15 @@ pub(crate) mod m_2026_03_23 {
pub(crate) use keymap::KEYMAP_PATTERNS;
}
+
+pub(crate) mod m_2026_03_30 {
+ mod settings;
+
+ pub(crate) use settings::make_play_sound_when_agent_done_an_enum;
+}
+
+pub(crate) mod m_2026_04_01 {
+ mod settings;
+
+ pub(crate) use settings::restructure_profiles_with_settings_key;
+}
@@ -0,0 +1,29 @@
+use anyhow::Result;
+use serde_json::Value;
+
+use crate::migrations::migrate_settings;
+
+pub fn make_play_sound_when_agent_done_an_enum(value: &mut Value) -> Result<()> {
+ migrate_settings(value, &mut migrate_one)
+}
+
+fn migrate_one(obj: &mut serde_json::Map<String, Value>) -> Result<()> {
+ let Some(play_sound) = obj
+ .get_mut("agent")
+ .and_then(|agent| agent.as_object_mut())
+ .and_then(|agent| agent.get_mut("play_sound_when_agent_done"))
+ else {
+ return Ok(());
+ };
+
+ *play_sound = match play_sound {
+ Value::Bool(true) => Value::String("always".to_string()),
+ Value::Bool(false) => Value::String("never".to_string()),
+ Value::String(s) if s == "never" || s == "when_hidden" || s == "always" => return Ok(()),
+ _ => {
+ anyhow::bail!("Expected play_sound_when_agent_done to be a boolean or valid enum value")
+ }
+ };
+
+ Ok(())
+}
@@ -0,0 +1,29 @@
+use anyhow::Result;
+use serde_json::Value;
+
+pub fn restructure_profiles_with_settings_key(value: &mut Value) -> Result<()> {
+ let Some(root_object) = value.as_object_mut() else {
+ return Ok(());
+ };
+
+ let Some(profiles) = root_object.get_mut("profiles") else {
+ return Ok(());
+ };
+
+ let Some(profiles_map) = profiles.as_object_mut() else {
+ return Ok(());
+ };
+
+ for profile_value in profiles_map.values_mut() {
+ if profile_value
+ .as_object()
+ .is_some_and(|m| m.contains_key("settings") || m.contains_key("base"))
+ {
+ continue;
+ }
+
+ *profile_value = serde_json::json!({ "settings": profile_value });
+ }
+
+ Ok(())
+}
@@ -247,6 +247,8 @@ pub fn migrate_settings(text: &str) -> Result<Option<String>> {
migrations::m_2026_03_16::SETTINGS_PATTERNS,
&SETTINGS_QUERY_2026_03_16,
),
+ MigrationType::Json(migrations::m_2026_03_30::make_play_sound_when_agent_done_an_enum),
+ MigrationType::Json(migrations::m_2026_04_01::restructure_profiles_with_settings_key),
];
run_migrations(text, migrations)
}
@@ -2400,6 +2402,132 @@ mod tests {
);
}
+ #[test]
+ fn test_make_play_sound_when_agent_done_an_enum() {
+ assert_migrate_with_migrations(
+ &[MigrationType::Json(
+ migrations::m_2026_03_30::make_play_sound_when_agent_done_an_enum,
+ )],
+ &r#"{ }"#.unindent(),
+ None,
+ );
+
+ assert_migrate_with_migrations(
+ &[MigrationType::Json(
+ migrations::m_2026_03_30::make_play_sound_when_agent_done_an_enum,
+ )],
+ &r#"{
+ "agent": {
+ "play_sound_when_agent_done": true
+ }
+ }"#
+ .unindent(),
+ Some(
+ &r#"{
+ "agent": {
+ "play_sound_when_agent_done": "always"
+ }
+ }"#
+ .unindent(),
+ ),
+ );
+
+ assert_migrate_with_migrations(
+ &[MigrationType::Json(
+ migrations::m_2026_03_30::make_play_sound_when_agent_done_an_enum,
+ )],
+ &r#"{
+ "agent": {
+ "play_sound_when_agent_done": false
+ }
+ }"#
+ .unindent(),
+ Some(
+ &r#"{
+ "agent": {
+ "play_sound_when_agent_done": "never"
+ }
+ }"#
+ .unindent(),
+ ),
+ );
+
+ assert_migrate_with_migrations(
+ &[MigrationType::Json(
+ migrations::m_2026_03_30::make_play_sound_when_agent_done_an_enum,
+ )],
+ &r#"{
+ "agent": {
+ "play_sound_when_agent_done": "when_hidden"
+ }
+ }"#
+ .unindent(),
+ None,
+ );
+
+ // Platform key: settings nested inside "macos" should be migrated
+ assert_migrate_with_migrations(
+ &[MigrationType::Json(
+ migrations::m_2026_03_30::make_play_sound_when_agent_done_an_enum,
+ )],
+ &r#"
+ {
+ "macos": {
+ "agent": {
+ "play_sound_when_agent_done": true
+ }
+ }
+ }
+ "#
+ .unindent(),
+ Some(
+ &r#"
+ {
+ "macos": {
+ "agent": {
+ "play_sound_when_agent_done": "always"
+ }
+ }
+ }
+ "#
+ .unindent(),
+ ),
+ );
+
+ // Profile: settings nested inside profiles should be migrated
+ assert_migrate_with_migrations(
+ &[MigrationType::Json(
+ migrations::m_2026_03_30::make_play_sound_when_agent_done_an_enum,
+ )],
+ &r#"
+ {
+ "profiles": {
+ "work": {
+ "agent": {
+ "play_sound_when_agent_done": false
+ }
+ }
+ }
+ }
+ "#
+ .unindent(),
+ Some(
+ &r#"
+ {
+ "profiles": {
+ "work": {
+ "agent": {
+ "play_sound_when_agent_done": "never"
+ }
+ }
+ }
+ }
+ "#
+ .unindent(),
+ ),
+ );
+ }
+
#[test]
fn test_remove_context_server_source() {
assert_migrate_settings(
@@ -4480,4 +4608,78 @@ mod tests {
),
);
}
+
+ #[test]
+ fn test_restructure_profiles_with_settings_key() {
+ assert_migrate_settings(
+ &r#"
+ {
+ "buffer_font_size": 14,
+ "profiles": {
+ "Presenting": {
+ "buffer_font_size": 20,
+ "theme": "One Light"
+ },
+ "Minimal": {
+ "vim_mode": true
+ }
+ }
+ }
+ "#
+ .unindent(),
+ Some(
+ &r#"
+ {
+ "buffer_font_size": 14,
+ "profiles": {
+ "Presenting": {
+ "settings": {
+ "buffer_font_size": 20,
+ "theme": "One Light"
+ }
+ },
+ "Minimal": {
+ "settings": {
+ "vim_mode": true
+ }
+ }
+ }
+ }
+ "#
+ .unindent(),
+ ),
+ );
+ }
+
+ #[test]
+ fn test_restructure_profiles_with_settings_key_already_migrated() {
+ assert_migrate_settings(
+ &r#"
+ {
+ "profiles": {
+ "Presenting": {
+ "settings": {
+ "buffer_font_size": 20
+ }
+ }
+ }
+ }
+ "#
+ .unindent(),
+ None,
+ );
+ }
+
+ #[test]
+ fn test_restructure_profiles_with_settings_key_no_profiles() {
+ assert_migrate_settings(
+ &r#"
+ {
+ "buffer_font_size": 14
+ }
+ "#
+ .unindent(),
+ None,
+ );
+ }
}
@@ -1,192 +1,331 @@
-use crate::{MultiBufferDimension, MultiBufferOffset, MultiBufferOffsetUtf16};
+use crate::{
+ ExcerptSummary, MultiBufferDimension, MultiBufferOffset, MultiBufferOffsetUtf16, PathKey,
+ PathKeyIndex, find_diff_state,
+};
-use super::{ExcerptId, MultiBufferSnapshot, ToOffset, ToPoint};
-use language::Point;
+use super::{MultiBufferSnapshot, ToOffset, ToPoint};
+use language::{BufferSnapshot, Point};
use std::{
cmp::Ordering,
ops::{Add, AddAssign, Range, Sub},
};
use sum_tree::Bias;
+use text::BufferId;
+
+/// A multibuffer anchor derived from an anchor into a specific excerpted buffer.
+#[derive(Clone, Copy, Debug, Eq, PartialEq, Hash)]
+pub struct ExcerptAnchor {
+ pub(crate) text_anchor: text::Anchor,
+ pub(crate) path: PathKeyIndex,
+ pub(crate) diff_base_anchor: Option<text::Anchor>,
+}
/// A stable reference to a position within a [`MultiBuffer`](super::MultiBuffer).
///
/// Unlike simple offsets, anchors remain valid as the text is edited, automatically
/// adjusting to reflect insertions and deletions around them.
#[derive(Clone, Copy, Eq, PartialEq, Hash)]
-pub struct Anchor {
- /// Identifies which excerpt within the multi-buffer this anchor belongs to.
- /// A multi-buffer can contain multiple excerpts from different buffers.
- pub excerpt_id: ExcerptId,
- /// The position within the excerpt's underlying buffer. This is a stable
- /// reference that remains valid as the buffer text is edited.
- pub text_anchor: text::Anchor,
- /// When present, indicates this anchor points into deleted text within an
- /// expanded diff hunk. The anchor references a position in the diff base
- /// (original) text rather than the current buffer text. This is used when
- /// displaying inline diffs where deleted lines are shown.
- pub diff_base_anchor: Option<text::Anchor>,
+pub enum Anchor {
+ /// An anchor that always resolves to the start of the multibuffer.
+ Min,
+ /// An anchor that's attached to a specific excerpted buffer.
+ Excerpt(ExcerptAnchor),
+ /// An anchor that always resolves to the end of the multibuffer.
+ Max,
}
-impl std::fmt::Debug for Anchor {
+pub(crate) enum AnchorSeekTarget {
+ Excerpt {
+ path_key: PathKey,
+ anchor: ExcerptAnchor,
+ // None when the buffer no longer exists in the multibuffer
+ snapshot: Option<BufferSnapshot>,
+ },
+ Empty,
+}
+
+impl std::fmt::Debug for AnchorSeekTarget {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
- if self.is_min() {
- return write!(f, "Anchor::min({:?})", self.text_anchor.buffer_id);
+ match self {
+ Self::Excerpt {
+ path_key,
+ anchor,
+ snapshot: _,
+ } => f
+ .debug_struct("Excerpt")
+ .field("path_key", path_key)
+ .field("anchor", anchor)
+ .finish(),
+ Self::Empty => write!(f, "Empty"),
}
- if self.is_max() {
- return write!(f, "Anchor::max({:?})", self.text_anchor.buffer_id);
+ }
+}
+
+impl std::fmt::Debug for Anchor {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ match self {
+ Anchor::Min => write!(f, "Anchor::Min"),
+ Anchor::Max => write!(f, "Anchor::Max"),
+ Anchor::Excerpt(excerpt_anchor) => write!(f, "{excerpt_anchor:?}"),
}
+ }
+}
- f.debug_struct("Anchor")
- .field("excerpt_id", &self.excerpt_id)
- .field("text_anchor", &self.text_anchor)
- .field("diff_base_anchor", &self.diff_base_anchor)
- .finish()
+impl From<ExcerptAnchor> for Anchor {
+ fn from(anchor: ExcerptAnchor) -> Self {
+ Anchor::Excerpt(anchor)
}
}
-impl Anchor {
- pub fn with_diff_base_anchor(self, diff_base_anchor: text::Anchor) -> Self {
- Self {
- diff_base_anchor: Some(diff_base_anchor),
- ..self
+impl ExcerptAnchor {
+ pub(crate) fn buffer_id(&self) -> BufferId {
+ self.text_anchor.buffer_id
+ }
+
+ pub(crate) fn text_anchor(&self) -> text::Anchor {
+ self.text_anchor
+ }
+
+ pub(crate) fn with_diff_base_anchor(mut self, diff_base_anchor: text::Anchor) -> Self {
+ self.diff_base_anchor = Some(diff_base_anchor);
+ self
+ }
+
+ pub(crate) fn cmp(&self, other: &Self, snapshot: &MultiBufferSnapshot) -> Ordering {
+ let Some(self_path_key) = snapshot.path_keys_by_index.get(&self.path) else {
+ panic!("anchor's path was never added to multibuffer")
+ };
+ let Some(other_path_key) = snapshot.path_keys_by_index.get(&other.path) else {
+ panic!("anchor's path was never added to multibuffer")
+ };
+
+ if self_path_key.cmp(other_path_key) != Ordering::Equal {
+ return self_path_key.cmp(other_path_key);
+ }
+
+ // in the case that you removed the buffer containing self,
+ // and added the buffer containing other with the same path key
+ // (ordering is arbitrary but consistent)
+ if self.text_anchor.buffer_id != other.text_anchor.buffer_id {
+ return self.text_anchor.buffer_id.cmp(&other.text_anchor.buffer_id);
+ }
+
+ let Some(buffer) = snapshot.buffer_for_path(&self_path_key) else {
+ return Ordering::Equal;
+ };
+ // Comparing two anchors into buffer A that formerly existed at path P,
+ // when path P has since been reused for a different buffer B
+ if buffer.remote_id() != self.text_anchor.buffer_id {
+ return Ordering::Equal;
+ };
+ assert_eq!(self.text_anchor.buffer_id, buffer.remote_id());
+ let text_cmp = self.text_anchor().cmp(&other.text_anchor(), buffer);
+ if text_cmp != Ordering::Equal {
+ return text_cmp;
+ }
+
+ if (self.diff_base_anchor.is_some() || other.diff_base_anchor.is_some())
+ && let Some(base_text) = find_diff_state(&snapshot.diffs, self.text_anchor.buffer_id)
+ .map(|diff| diff.base_text())
+ {
+ let self_anchor = self.diff_base_anchor.filter(|a| a.is_valid(base_text));
+ let other_anchor = other.diff_base_anchor.filter(|a| a.is_valid(base_text));
+ return match (self_anchor, other_anchor) {
+ (Some(a), Some(b)) => a.cmp(&b, base_text),
+ (Some(_), None) => match other.text_anchor().bias {
+ Bias::Left => Ordering::Greater,
+ Bias::Right => Ordering::Less,
+ },
+ (None, Some(_)) => match self.text_anchor().bias {
+ Bias::Left => Ordering::Less,
+ Bias::Right => Ordering::Greater,
+ },
+ (None, None) => Ordering::Equal,
+ };
}
+
+ Ordering::Equal
}
- pub fn in_buffer(excerpt_id: ExcerptId, text_anchor: text::Anchor) -> Self {
- Self {
- excerpt_id,
- text_anchor,
- diff_base_anchor: None,
+ fn bias_left(&self, snapshot: &MultiBufferSnapshot) -> Self {
+ if self.text_anchor.bias == Bias::Left {
+ return *self;
+ }
+ let Some(buffer) = snapshot.buffer_for_id(self.text_anchor.buffer_id) else {
+ return *self;
+ };
+ let text_anchor = self.text_anchor().bias_left(&buffer);
+ let ret = Self::in_buffer(self.path, text_anchor);
+ if let Some(diff_base_anchor) = self.diff_base_anchor {
+ if let Some(diff) = find_diff_state(&snapshot.diffs, self.text_anchor.buffer_id)
+ && diff_base_anchor.is_valid(&diff.base_text())
+ {
+ ret.with_diff_base_anchor(diff_base_anchor.bias_left(diff.base_text()))
+ } else {
+ ret.with_diff_base_anchor(diff_base_anchor)
+ }
+ } else {
+ ret
}
}
- pub fn range_in_buffer(excerpt_id: ExcerptId, range: Range<text::Anchor>) -> Range<Self> {
- Self::in_buffer(excerpt_id, range.start)..Self::in_buffer(excerpt_id, range.end)
+ fn bias_right(&self, snapshot: &MultiBufferSnapshot) -> Self {
+ if self.text_anchor.bias == Bias::Right {
+ return *self;
+ }
+ let Some(buffer) = snapshot.buffer_for_id(self.text_anchor.buffer_id) else {
+ return *self;
+ };
+ let text_anchor = self.text_anchor().bias_right(&buffer);
+ let ret = Self::in_buffer(self.path, text_anchor);
+ if let Some(diff_base_anchor) = self.diff_base_anchor {
+ if let Some(diff) = find_diff_state(&snapshot.diffs, self.text_anchor.buffer_id)
+ && diff_base_anchor.is_valid(&diff.base_text())
+ {
+ ret.with_diff_base_anchor(diff_base_anchor.bias_right(diff.base_text()))
+ } else {
+ ret.with_diff_base_anchor(diff_base_anchor)
+ }
+ } else {
+ ret
+ }
}
- pub fn min() -> Self {
- Self {
- excerpt_id: ExcerptId::min(),
- text_anchor: text::Anchor::MIN,
+ #[track_caller]
+ pub(crate) fn in_buffer(path: PathKeyIndex, text_anchor: text::Anchor) -> Self {
+ ExcerptAnchor {
+ path,
diff_base_anchor: None,
+ text_anchor,
}
}
- pub fn max() -> Self {
- Self {
- excerpt_id: ExcerptId::max(),
- text_anchor: text::Anchor::MAX,
- diff_base_anchor: None,
+ fn is_valid(&self, snapshot: &MultiBufferSnapshot) -> bool {
+ let Some(target) = self.try_seek_target(snapshot) else {
+ return false;
+ };
+ let Some(buffer_snapshot) = snapshot.buffer_for_id(self.buffer_id()) else {
+ return false;
+ };
+ // Early check to avoid invalid comparisons when seeking
+ if !buffer_snapshot.can_resolve(&self.text_anchor) {
+ return false;
}
+ let mut cursor = snapshot.excerpts.cursor::<ExcerptSummary>(());
+ cursor.seek(&target, Bias::Left);
+ let Some(excerpt) = cursor.item() else {
+ return false;
+ };
+ let is_valid = self.text_anchor == excerpt.range.context.start
+ || self.text_anchor == excerpt.range.context.end
+ || self.text_anchor.is_valid(&buffer_snapshot);
+ is_valid
+ && excerpt
+ .range
+ .context
+ .start
+ .cmp(&self.text_anchor(), buffer_snapshot)
+ .is_le()
+ && excerpt
+ .range
+ .context
+ .end
+ .cmp(&self.text_anchor(), buffer_snapshot)
+ .is_ge()
+ }
+
+ pub(crate) fn seek_target(&self, snapshot: &MultiBufferSnapshot) -> AnchorSeekTarget {
+ self.try_seek_target(snapshot)
+ .expect("anchor is from different multi-buffer")
+ }
+
+ pub(crate) fn try_seek_target(
+ &self,
+ snapshot: &MultiBufferSnapshot,
+ ) -> Option<AnchorSeekTarget> {
+ let path_key = snapshot.try_path_for_anchor(*self)?;
+ let buffer = snapshot.buffer_for_path(&path_key).cloned();
+ Some(AnchorSeekTarget::Excerpt {
+ path_key,
+ anchor: *self,
+ snapshot: buffer,
+ })
+ }
+}
+
+impl ToOffset for ExcerptAnchor {
+ fn to_offset(&self, snapshot: &MultiBufferSnapshot) -> MultiBufferOffset {
+ Anchor::from(*self).to_offset(snapshot)
+ }
+
+ fn to_offset_utf16(&self, snapshot: &MultiBufferSnapshot) -> MultiBufferOffsetUtf16 {
+ Anchor::from(*self).to_offset_utf16(snapshot)
+ }
+}
+
+impl ToPoint for ExcerptAnchor {
+ fn to_point(&self, snapshot: &MultiBufferSnapshot) -> Point {
+ Anchor::from(*self).to_point(snapshot)
}
+ fn to_point_utf16(&self, snapshot: &MultiBufferSnapshot) -> rope::PointUtf16 {
+ Anchor::from(*self).to_point_utf16(snapshot)
+ }
+}
+
+impl Anchor {
pub fn is_min(&self) -> bool {
- self.excerpt_id == ExcerptId::min()
- && self.text_anchor.is_min()
- && self.diff_base_anchor.is_none()
+ matches!(self, Self::Min)
}
pub fn is_max(&self) -> bool {
- self.excerpt_id == ExcerptId::max()
- && self.text_anchor.is_max()
- && self.diff_base_anchor.is_none()
+ matches!(self, Self::Max)
}
- pub fn cmp(&self, other: &Anchor, snapshot: &MultiBufferSnapshot) -> Ordering {
- if self == other {
- return Ordering::Equal;
- }
+ pub(crate) fn in_buffer(path: PathKeyIndex, text_anchor: text::Anchor) -> Self {
+ Self::Excerpt(ExcerptAnchor::in_buffer(path, text_anchor))
+ }
- let self_excerpt_id = snapshot.latest_excerpt_id(self.excerpt_id);
- let other_excerpt_id = snapshot.latest_excerpt_id(other.excerpt_id);
+ pub(crate) fn range_in_buffer(path: PathKeyIndex, range: Range<text::Anchor>) -> Range<Self> {
+ Self::in_buffer(path, range.start)..Self::in_buffer(path, range.end)
+ }
- let excerpt_id_cmp = self_excerpt_id.cmp(&other_excerpt_id, snapshot);
- if excerpt_id_cmp.is_ne() {
- return excerpt_id_cmp;
- }
- if self_excerpt_id == ExcerptId::max()
- && self.text_anchor.is_max()
- && self.text_anchor.is_max()
- && self.diff_base_anchor.is_none()
- && other.diff_base_anchor.is_none()
- {
- return Ordering::Equal;
- }
- if let Some(excerpt) = snapshot.excerpt(self_excerpt_id) {
- let text_cmp = self.text_anchor.cmp(&other.text_anchor, &excerpt.buffer);
- if text_cmp.is_ne() {
- return text_cmp;
- }
- if (self.diff_base_anchor.is_some() || other.diff_base_anchor.is_some())
- && let Some(base_text) = snapshot
- .diff_state(excerpt.buffer_id)
- .map(|diff| diff.base_text())
- {
- let self_anchor = self.diff_base_anchor.filter(|a| a.is_valid(base_text));
- let other_anchor = other.diff_base_anchor.filter(|a| a.is_valid(base_text));
- return match (self_anchor, other_anchor) {
- (Some(a), Some(b)) => a.cmp(&b, base_text),
- (Some(_), None) => match other.text_anchor.bias {
- Bias::Left => Ordering::Greater,
- Bias::Right => Ordering::Less,
- },
- (None, Some(_)) => match self.text_anchor.bias {
- Bias::Left => Ordering::Less,
- Bias::Right => Ordering::Greater,
- },
- (None, None) => Ordering::Equal,
- };
+ pub fn cmp(&self, other: &Anchor, snapshot: &MultiBufferSnapshot) -> Ordering {
+ match (self, other) {
+ (Anchor::Min, Anchor::Min) => return Ordering::Equal,
+ (Anchor::Max, Anchor::Max) => return Ordering::Equal,
+ (Anchor::Min, _) => return Ordering::Less,
+ (Anchor::Max, _) => return Ordering::Greater,
+ (_, Anchor::Max) => return Ordering::Less,
+ (_, Anchor::Min) => return Ordering::Greater,
+ (Anchor::Excerpt(self_excerpt_anchor), Anchor::Excerpt(other_excerpt_anchor)) => {
+ self_excerpt_anchor.cmp(other_excerpt_anchor, snapshot)
}
}
- Ordering::Equal
}
pub fn bias(&self) -> Bias {
- self.text_anchor.bias
+ match self {
+ Anchor::Min => Bias::Left,
+ Anchor::Max => Bias::Right,
+ Anchor::Excerpt(anchor) => anchor.text_anchor.bias,
+ }
}
pub fn bias_left(&self, snapshot: &MultiBufferSnapshot) -> Anchor {
- if self.text_anchor.bias != Bias::Left
- && let Some(excerpt) = snapshot.excerpt(self.excerpt_id)
- {
- return Self {
- excerpt_id: excerpt.id,
- text_anchor: self.text_anchor.bias_left(&excerpt.buffer),
- diff_base_anchor: self.diff_base_anchor.map(|a| {
- if let Some(base_text) = snapshot
- .diff_state(excerpt.buffer_id)
- .map(|diff| diff.base_text())
- && a.is_valid(&base_text)
- {
- return a.bias_left(base_text);
- }
- a
- }),
- };
+ match self {
+ Anchor::Min => *self,
+ Anchor::Max => snapshot.anchor_before(snapshot.max_point()),
+ Anchor::Excerpt(anchor) => Anchor::Excerpt(anchor.bias_left(snapshot)),
}
- *self
}
pub fn bias_right(&self, snapshot: &MultiBufferSnapshot) -> Anchor {
- if self.text_anchor.bias != Bias::Right
- && let Some(excerpt) = snapshot.excerpt(self.excerpt_id)
- {
- return Self {
- excerpt_id: excerpt.id,
- text_anchor: self.text_anchor.bias_right(&excerpt.buffer),
- diff_base_anchor: self.diff_base_anchor.map(|a| {
- if let Some(base_text) = snapshot
- .diff_state(excerpt.buffer_id)
- .map(|diff| diff.base_text())
- && a.is_valid(&base_text)
- {
- return a.bias_right(base_text);
- }
- a
- }),
- };
+ match self {
+ Anchor::Max => *self,
+ Anchor::Min => snapshot.anchor_after(Point::zero()),
+ Anchor::Excerpt(anchor) => Anchor::Excerpt(anchor.bias_right(snapshot)),
}
- *self
}
pub fn summary<D>(&self, snapshot: &MultiBufferSnapshot) -> D
@@ -203,16 +342,111 @@ impl Anchor {
}
pub fn is_valid(&self, snapshot: &MultiBufferSnapshot) -> bool {
- if self.is_min() || self.is_max() {
- true
- } else if let Some(excerpt) = snapshot.excerpt(self.excerpt_id) {
- (self.text_anchor == excerpt.range.context.start
- || self.text_anchor == excerpt.range.context.end
- || self.text_anchor.is_valid(&excerpt.buffer))
- && excerpt.contains(self)
- } else {
- false
+ match self {
+ Anchor::Min | Anchor::Max => true,
+ Anchor::Excerpt(excerpt_anchor) => excerpt_anchor.is_valid(snapshot),
+ }
+ }
+
+ fn to_excerpt_anchor(&self, snapshot: &MultiBufferSnapshot) -> Option<ExcerptAnchor> {
+ match self {
+ Anchor::Min => {
+ let excerpt = snapshot.excerpts.first()?;
+
+ Some(ExcerptAnchor {
+ text_anchor: excerpt.range.context.start,
+ path: excerpt.path_key_index,
+ diff_base_anchor: None,
+ })
+ }
+ Anchor::Excerpt(excerpt_anchor) => Some(*excerpt_anchor),
+ Anchor::Max => {
+ let excerpt = snapshot.excerpts.last()?;
+
+ Some(ExcerptAnchor {
+ text_anchor: excerpt.range.context.end,
+ path: excerpt.path_key_index,
+ diff_base_anchor: None,
+ })
+ }
+ }
+ }
+
+ pub(crate) fn seek_target(&self, snapshot: &MultiBufferSnapshot) -> AnchorSeekTarget {
+ let Some(excerpt_anchor) = self.to_excerpt_anchor(snapshot) else {
+ return AnchorSeekTarget::Empty;
+ };
+
+ excerpt_anchor.seek_target(snapshot)
+ }
+
+ pub(crate) fn excerpt_anchor(&self) -> Option<ExcerptAnchor> {
+ match self {
+ Anchor::Min | Anchor::Max => None,
+ Anchor::Excerpt(excerpt_anchor) => Some(*excerpt_anchor),
+ }
+ }
+
+ pub(crate) fn text_anchor(&self) -> Option<text::Anchor> {
+ match self {
+ Anchor::Min | Anchor::Max => None,
+ Anchor::Excerpt(excerpt_anchor) => Some(excerpt_anchor.text_anchor()),
+ }
+ }
+
+ pub fn opaque_id(&self) -> Option<[u8; 20]> {
+ self.text_anchor().map(|a| a.opaque_id())
+ }
+
+ /// Note: anchor_to_buffer_anchor is probably what you want
+ pub fn raw_text_anchor(&self) -> Option<text::Anchor> {
+ match self {
+ Anchor::Min | Anchor::Max => None,
+ Anchor::Excerpt(excerpt_anchor) => Some(excerpt_anchor.text_anchor),
+ }
+ }
+
+ pub(crate) fn try_seek_target(
+ &self,
+ snapshot: &MultiBufferSnapshot,
+ ) -> Option<AnchorSeekTarget> {
+ let Some(excerpt_anchor) = self.to_excerpt_anchor(snapshot) else {
+ return Some(AnchorSeekTarget::Empty);
+ };
+ excerpt_anchor.try_seek_target(snapshot)
+ }
+
+ /// Returns the text anchor for this anchor.
+ /// Panics if the anchor is from a different buffer.
+ pub fn text_anchor_in(&self, buffer: &BufferSnapshot) -> text::Anchor {
+ match self {
+ Anchor::Min => text::Anchor::min_for_buffer(buffer.remote_id()),
+ Anchor::Excerpt(excerpt_anchor) => {
+ let text_anchor = excerpt_anchor.text_anchor;
+ assert_eq!(text_anchor.buffer_id, buffer.remote_id());
+ text_anchor
+ }
+ Anchor::Max => text::Anchor::max_for_buffer(buffer.remote_id()),
+ }
+ }
+
+ pub fn diff_base_anchor(&self) -> Option<text::Anchor> {
+ self.excerpt_anchor()?.diff_base_anchor
+ }
+
+ #[cfg(any(test, feature = "test-support"))]
+ pub fn expect_text_anchor(&self) -> text::Anchor {
+ self.excerpt_anchor().unwrap().text_anchor
+ }
+
+ pub fn with_diff_base_anchor(mut self, diff_base_anchor: text::Anchor) -> Self {
+ match &mut self {
+ Anchor::Min | Anchor::Max => {}
+ Anchor::Excerpt(excerpt_anchor) => {
+ excerpt_anchor.diff_base_anchor = Some(diff_base_anchor);
+ }
}
+ self
}
}
@@ -8,6 +8,7 @@ use self::transaction::History;
pub use anchor::{Anchor, AnchorRangeExt};
+use anchor::{AnchorSeekTarget, ExcerptAnchor};
use anyhow::{Result, anyhow};
use buffer_diff::{
BufferDiff, BufferDiffEvent, BufferDiffSnapshot, DiffChanged, DiffHunkSecondaryStatus,
@@ -15,14 +16,14 @@ use buffer_diff::{
};
use clock::ReplicaId;
use collections::{BTreeMap, Bound, HashMap, HashSet};
-use gpui::{App, Context, Entity, EntityId, EventEmitter};
+use gpui::{App, Context, Entity, EventEmitter};
use itertools::Itertools;
use language::{
- AutoindentMode, BracketMatch, Buffer, BufferChunks, BufferRow, BufferSnapshot, Capability,
- CharClassifier, CharKind, CharScopeContext, Chunk, CursorShape, DiagnosticEntryRef, File,
- IndentGuideSettings, IndentSize, Language, LanguageScope, OffsetRangeExt, OffsetUtf16, Outline,
- OutlineItem, Point, PointUtf16, Selection, TextDimension, TextObject, ToOffset as _,
- ToPoint as _, TransactionId, TreeSitterOptions, Unclipped,
+ AutoindentMode, Buffer, BufferChunks, BufferRow, BufferSnapshot, Capability, CharClassifier,
+ CharKind, CharScopeContext, Chunk, CursorShape, DiagnosticEntryRef, File, IndentGuideSettings,
+ IndentSize, Language, LanguageScope, OffsetRangeExt, OffsetUtf16, Outline, OutlineItem, Point,
+ PointUtf16, Selection, TextDimension, TextObject, ToOffset as _, ToPoint as _, TransactionId,
+ TreeSitterOptions, Unclipped,
language_settings::{AllLanguageSettings, LanguageSettings},
};
@@ -37,7 +38,8 @@ use std::{
any::type_name,
borrow::Cow,
cell::{Cell, OnceCell, Ref, RefCell},
- cmp, fmt,
+ cmp::{self, Ordering},
+ fmt,
future::Future,
io,
iter::{self, FromIterator},
@@ -51,15 +53,13 @@ use std::{
use sum_tree::{Bias, Cursor, Dimension, Dimensions, SumTree, TreeMap};
use text::{
BufferId, Edit, LineIndent, TextSummary,
- locator::Locator,
subscription::{Subscription, Topic},
};
use theme::SyntaxTheme;
use unicode_segmentation::UnicodeSegmentation;
-use util::post_inc;
use ztracing::instrument;
-pub use self::path_key::{PathExcerptInsertResult, PathKey};
+pub use self::path_key::PathKey;
pub static EXCERPT_CONTEXT_LINES: OnceLock<fn(&App) -> u32> = OnceLock::new();
@@ -67,9 +67,6 @@ pub fn excerpt_context_lines(cx: &App) -> u32 {
EXCERPT_CONTEXT_LINES.get().map(|f| f(cx)).unwrap_or(2)
}
-#[derive(Debug, Default, Clone, Copy, Hash, PartialEq, Eq, PartialOrd, Ord)]
-pub struct ExcerptId(u32);
-
/// One or more [`Buffers`](Buffer) being edited in a single view.
///
/// See <https://zed.dev/features#multi-buffers>
@@ -79,10 +76,6 @@ pub struct MultiBuffer {
snapshot: RefCell<MultiBufferSnapshot>,
/// Contains the state of the buffers being edited
buffers: BTreeMap<BufferId, BufferState>,
- /// Mapping from path keys to their excerpts.
- excerpts_by_path: BTreeMap<PathKey, Vec<ExcerptId>>,
- /// Mapping from excerpt IDs to their path key.
- paths_by_excerpt: HashMap<ExcerptId, PathKey>,
/// Mapping from buffer IDs to their diff states
diffs: HashMap<BufferId, DiffState>,
subscriptions: Topic<MultiBufferOffset>,
@@ -98,24 +91,20 @@ pub struct MultiBuffer {
buffer_changed_since_sync: Rc<Cell<bool>>,
}
+#[derive(Clone, Copy, Debug, PartialEq, Eq, PartialOrd, Ord, Hash)]
+struct PathKeyIndex(u64);
+
#[derive(Clone, Debug, PartialEq, Eq)]
pub enum Event {
- ExcerptsAdded {
+ BufferRangesUpdated {
buffer: Entity<Buffer>,
- predecessor: ExcerptId,
- excerpts: Vec<(ExcerptId, ExcerptRange<language::Anchor>)>,
+ path_key: PathKey,
+ ranges: Vec<ExcerptRange<text::Anchor>>,
},
- ExcerptsRemoved {
- ids: Vec<ExcerptId>,
- /// Contains only buffer IDs for which all excerpts have been removed.
- /// Buffers that still have remaining excerpts are never included.
+ BuffersRemoved {
removed_buffer_ids: Vec<BufferId>,
},
- ExcerptsExpanded {
- ids: Vec<ExcerptId>,
- },
- ExcerptsEdited {
- excerpt_ids: Vec<ExcerptId>,
+ BuffersEdited {
buffer_ids: Vec<BufferId>,
},
DiffHunksToggled,
@@ -145,14 +134,14 @@ pub struct MultiBufferDiffHunk {
pub buffer_id: BufferId,
/// The range of the underlying buffer that this hunk corresponds to.
pub buffer_range: Range<text::Anchor>,
- /// The excerpt that contains the diff hunk.
- pub excerpt_id: ExcerptId,
/// The range within the buffer's diff base that this hunk corresponds to.
pub diff_base_byte_range: Range<BufferOffset>,
/// The status of this hunk (added/modified/deleted and secondary status).
pub status: DiffHunkStatus,
/// The word diffs for this hunk.
pub word_diffs: Vec<Range<MultiBufferOffset>>,
+ pub excerpt_range: ExcerptRange<text::Anchor>,
+ pub multi_buffer_range: Range<Anchor>,
}
impl MultiBufferDiffHunk {
@@ -165,17 +154,12 @@ impl MultiBufferDiffHunk {
&& self.buffer_range.start.is_min()
&& self.buffer_range.end.is_max()
}
-
- pub fn multi_buffer_range(&self) -> Range<Anchor> {
- let start = Anchor::in_buffer(self.excerpt_id, self.buffer_range.start);
- let end = Anchor::in_buffer(self.excerpt_id, self.buffer_range.end);
- start..end
- }
}
pub type MultiBufferPoint = Point;
+/// ExcerptOffset is offset into the non-deleted text of the multibuffer
type ExcerptOffset = ExcerptDimension<MultiBufferOffset>;
-type ExcerptPoint = ExcerptDimension<Point>;
+/// ExcerptOffset is based on the non-deleted text of the multibuffer
#[derive(Copy, Clone, Debug, Default, Eq, Ord, PartialOrd, PartialEq, Hash, serde::Deserialize)]
#[serde(transparent)]
@@ -518,10 +502,6 @@ pub trait ToPoint: 'static + fmt::Debug {
struct BufferState {
buffer: Entity<Buffer>,
- last_version: RefCell<clock::Global>,
- last_non_text_state_update_count: Cell<usize>,
- // Note, any changes to this field value require updating snapshot.buffer_locators as well
- excerpts: Vec<Locator>,
_subscriptions: [gpui::Subscription; 2],
}
@@ -694,15 +674,31 @@ impl DiffState {
}
}
+#[derive(Clone)]
+struct BufferStateSnapshot {
+ path_key: PathKey,
+ path_key_index: PathKeyIndex,
+ buffer_snapshot: BufferSnapshot,
+}
+
+impl fmt::Debug for BufferStateSnapshot {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ f.debug_struct("BufferStateSnapshot")
+ .field("path_key", &self.path_key)
+ .field("buffer_id", &self.buffer_snapshot.remote_id())
+ .finish()
+ }
+}
+
/// The contents of a [`MultiBuffer`] at a single point in time.
#[derive(Clone, Default)]
pub struct MultiBufferSnapshot {
excerpts: SumTree<Excerpt>,
- buffer_locators: TreeMap<BufferId, Arc<[Locator]>>,
+ buffers: TreeMap<BufferId, BufferStateSnapshot>,
+ path_keys_by_index: TreeMap<PathKeyIndex, PathKey>,
+ indices_by_path_key: TreeMap<PathKey, PathKeyIndex>,
diffs: SumTree<DiffStateSnapshot>,
diff_transforms: SumTree<DiffTransform>,
- excerpt_ids: SumTree<ExcerptIdMapping>,
- replaced_excerpts: Arc<HashMap<ExcerptId, ExcerptId>>,
non_text_state_update_count: usize,
edit_count: usize,
is_dirty: bool,
@@ -717,24 +713,12 @@ pub struct MultiBufferSnapshot {
show_headers: bool,
}
-// follower: None
-// - BufferContent(Some)
-// - BufferContent(None)
-// - DeletedHunk
-//
-// follower: Some
-// - BufferContent(Some)
-// - BufferContent(None)
-
#[derive(Debug, Clone)]
enum DiffTransform {
- // RealText
BufferContent {
summary: MBTextSummary,
- // modified_hunk_info
inserted_hunk_info: Option<DiffTransformHunkInfo>,
},
- // ExpandedHunkText
DeletedHunk {
summary: TextSummary,
buffer_id: BufferId,
@@ -746,52 +730,71 @@ enum DiffTransform {
#[derive(Clone, Copy, Debug)]
struct DiffTransformHunkInfo {
- excerpt_id: ExcerptId,
+ buffer_id: BufferId,
hunk_start_anchor: text::Anchor,
hunk_secondary_status: DiffHunkSecondaryStatus,
is_logically_deleted: bool,
+ excerpt_end: ExcerptAnchor,
}
impl Eq for DiffTransformHunkInfo {}
impl PartialEq for DiffTransformHunkInfo {
fn eq(&self, other: &DiffTransformHunkInfo) -> bool {
- self.excerpt_id == other.excerpt_id && self.hunk_start_anchor == other.hunk_start_anchor
+ self.buffer_id == other.buffer_id && self.hunk_start_anchor == other.hunk_start_anchor
}
}
impl std::hash::Hash for DiffTransformHunkInfo {
fn hash<H: std::hash::Hasher>(&self, state: &mut H) {
- self.excerpt_id.hash(state);
+ self.buffer_id.hash(state);
self.hunk_start_anchor.hash(state);
}
}
#[derive(Clone)]
-pub struct ExcerptInfo {
- pub id: ExcerptId,
- pub buffer: Arc<BufferSnapshot>,
- pub buffer_id: BufferId,
+pub struct ExcerptBoundaryInfo {
+ pub start_anchor: Anchor,
pub range: ExcerptRange<text::Anchor>,
pub end_row: MultiBufferRow,
}
-impl std::fmt::Debug for ExcerptInfo {
+impl ExcerptBoundaryInfo {
+ pub fn start_text_anchor(&self) -> text::Anchor {
+ self.range.context.start
+ }
+ pub fn buffer_id(&self) -> BufferId {
+ self.start_text_anchor().buffer_id
+ }
+ pub fn buffer<'a>(&self, snapshot: &'a MultiBufferSnapshot) -> &'a BufferSnapshot {
+ snapshot
+ .buffer_for_id(self.buffer_id())
+ .expect("buffer snapshot not found for excerpt boundary")
+ }
+}
+
+impl std::fmt::Debug for ExcerptBoundaryInfo {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
f.debug_struct(type_name::<Self>())
- .field("id", &self.id)
- .field("buffer_id", &self.buffer_id)
- .field("path", &self.buffer.file().map(|f| f.path()))
+ .field("buffer_id", &self.buffer_id())
.field("range", &self.range)
.finish()
}
}
+impl PartialEq for ExcerptBoundaryInfo {
+ fn eq(&self, other: &Self) -> bool {
+ self.start_anchor == other.start_anchor && self.range == other.range
+ }
+}
+
+impl Eq for ExcerptBoundaryInfo {}
+
/// A boundary between `Excerpt`s in a [`MultiBuffer`]
#[derive(Debug)]
pub struct ExcerptBoundary {
- pub prev: Option<ExcerptInfo>,
- pub next: ExcerptInfo,
+ pub prev: Option<ExcerptBoundaryInfo>,
+ pub next: ExcerptBoundaryInfo,
/// The row in the `MultiBuffer` where the boundary is located
pub row: MultiBufferRow,
}
@@ -800,7 +803,7 @@ impl ExcerptBoundary {
pub fn starts_new_buffer(&self) -> bool {
match (self.prev.as_ref(), &self.next) {
(None, _) => true,
- (Some(prev), next) => prev.buffer_id != next.buffer_id,
+ (Some(prev), next) => prev.buffer_id() != next.buffer_id(),
}
}
}
@@ -808,7 +811,7 @@ impl ExcerptBoundary {
#[derive(Copy, Clone, Debug, PartialEq, Eq)]
pub struct ExpandInfo {
pub direction: ExpandExcerptDirection,
- pub excerpt_id: ExcerptId,
+ pub start_anchor: Anchor,
}
#[derive(Copy, Clone, Debug, Default, PartialEq, Eq)]
@@ -822,45 +825,20 @@ pub struct RowInfo {
}
/// A slice into a [`Buffer`] that is being edited in a [`MultiBuffer`].
-#[derive(Clone)]
-struct Excerpt {
- /// The unique identifier for this excerpt
- id: ExcerptId,
+#[derive(Clone, Debug)]
+pub(crate) struct Excerpt {
/// The location of the excerpt in the [`MultiBuffer`]
- locator: Locator,
- /// The buffer being excerpted
- buffer_id: BufferId,
- /// A snapshot of the buffer being excerpted
- buffer: Arc<BufferSnapshot>,
+ pub(crate) path_key: PathKey,
+ pub(crate) path_key_index: PathKeyIndex,
+ pub(crate) buffer_id: BufferId,
/// The range of the buffer to be shown in the excerpt
- range: ExcerptRange<text::Anchor>,
+ pub(crate) range: ExcerptRange<text::Anchor>,
+
/// The last row in the excerpted slice of the buffer
- max_buffer_row: BufferRow,
+ pub(crate) max_buffer_row: BufferRow,
/// A summary of the text in the excerpt
- text_summary: TextSummary,
- has_trailing_newline: bool,
-}
-
-/// A public view into an `Excerpt` in a [`MultiBuffer`].
-///
-/// Contains methods for getting the [`Buffer`] of the excerpt,
-/// as well as mapping offsets to/from buffer and multibuffer coordinates.
-#[derive(Clone)]
-pub struct MultiBufferExcerpt<'a> {
- excerpt: &'a Excerpt,
- diff_transforms:
- sum_tree::Cursor<'a, 'static, DiffTransform, DiffTransforms<MultiBufferOffset>>,
- /// The offset in the multibuffer considering diff transforms.
- offset: MultiBufferOffset,
- /// The offset in the multibuffer without diff transforms.
- excerpt_offset: ExcerptOffset,
- buffer_offset: BufferOffset,
-}
-
-#[derive(Clone, Debug)]
-struct ExcerptIdMapping {
- id: ExcerptId,
- locator: Locator,
+ pub(crate) text_summary: TextSummary,
+ pub(crate) has_trailing_newline: bool,
}
/// A range of text from a single [`Buffer`], to be shown as an `Excerpt`.
@@ -883,16 +861,37 @@ impl<T: Clone> ExcerptRange<T> {
}
}
-#[derive(Clone, Debug, Default)]
+impl ExcerptRange<text::Anchor> {
+ pub fn contains(&self, t: &text::Anchor, snapshot: &BufferSnapshot) -> bool {
+ self.context.start.cmp(t, snapshot).is_le() && self.context.end.cmp(t, snapshot).is_ge()
+ }
+}
+
+#[derive(Clone, Debug)]
pub struct ExcerptSummary {
- excerpt_id: ExcerptId,
- /// The location of the last [`Excerpt`] being summarized
- excerpt_locator: Locator,
+ path_key: PathKey,
+ max_anchor: Option<text::Anchor>,
widest_line_number: u32,
text: MBTextSummary,
count: usize,
}
+impl ExcerptSummary {
+ pub fn min() -> Self {
+ ExcerptSummary {
+ path_key: PathKey::min(),
+ max_anchor: None,
+ widest_line_number: 0,
+ text: MBTextSummary::default(),
+ count: 0,
+ }
+ }
+
+ fn len(&self) -> ExcerptOffset {
+ ExcerptDimension(self.text.len)
+ }
+}
+
#[derive(Debug, Clone)]
pub struct DiffTransformSummary {
input: MBTextSummary,
@@ -1068,13 +1067,13 @@ pub struct MultiBufferChunks<'a> {
excerpts: Cursor<'a, 'static, Excerpt, ExcerptOffset>,
diff_transforms:
Cursor<'a, 'static, DiffTransform, Dimensions<MultiBufferOffset, ExcerptOffset>>,
- diffs: &'a SumTree<DiffStateSnapshot>,
diff_base_chunks: Option<(BufferId, BufferChunks<'a>)>,
buffer_chunk: Option<Chunk<'a>>,
range: Range<MultiBufferOffset>,
excerpt_offset_range: Range<ExcerptOffset>,
excerpt_chunks: Option<ExcerptChunks<'a>>,
language_aware: bool,
+ snapshot: &'a MultiBufferSnapshot,
}
pub struct ReversedMultiBufferChunks<'a> {
@@ -1128,8 +1127,8 @@ impl<'a, MBD: MultiBufferDimension> Dimension<'a, DiffTransformSummary> for Diff
struct MultiBufferCursor<'a, MBD, BD> {
excerpts: Cursor<'a, 'static, Excerpt, ExcerptDimension<MBD>>,
diff_transforms: Cursor<'a, 'static, DiffTransform, DiffTransforms<MBD>>,
- diffs: &'a SumTree<DiffStateSnapshot>,
cached_region: OnceCell<Option<MultiBufferRegion<'a, MBD, BD>>>,
+ snapshot: &'a MultiBufferSnapshot,
}
#[derive(Clone)]
@@ -1144,8 +1143,8 @@ struct MultiBufferRegion<'a, MBD, BD> {
}
struct ExcerptChunks<'a> {
- excerpt_id: ExcerptId,
content_chunks: BufferChunks<'a>,
+ end: ExcerptAnchor,
has_footer: bool,
}
@@ -1155,7 +1154,6 @@ struct BufferEdit {
new_text: Arc<str>,
is_insertion: bool,
original_indent_column: Option<u32>,
- excerpt_id: ExcerptId,
}
#[derive(Clone, Copy, Debug, PartialEq)]
@@ -1258,8 +1256,6 @@ impl MultiBuffer {
singleton: false,
capability,
title: None,
- excerpts_by_path: Default::default(),
- paths_by_excerpt: Default::default(),
buffer_changed_since_sync: Default::default(),
history: History::default(),
}
@@ -1276,11 +1272,6 @@ impl MultiBuffer {
*buffer_id,
BufferState {
buffer: buffer_state.buffer.clone(),
- last_version: buffer_state.last_version.clone(),
- last_non_text_state_update_count: buffer_state
- .last_non_text_state_update_count
- .clone(),
- excerpts: buffer_state.excerpts.clone(),
_subscriptions: [
new_cx.observe(&buffer_state.buffer, |_, _, cx| cx.notify()),
new_cx.subscribe(&buffer_state.buffer, Self::on_buffer_event),
@@ -1295,8 +1286,6 @@ impl MultiBuffer {
Self {
snapshot: RefCell::new(self.snapshot.borrow().clone()),
buffers,
- excerpts_by_path: Default::default(),
- paths_by_excerpt: Default::default(),
diffs: diff_bases,
subscriptions: Default::default(),
singleton: self.singleton,
@@ -1451,7 +1440,7 @@ impl MultiBuffer {
_ => Default::default(),
};
- let (buffer_edits, edited_excerpt_ids) = MultiBuffer::convert_edits_to_buffer_edits(
+ let buffer_edits = MultiBuffer::convert_edits_to_buffer_edits(
edits,
this.snapshot.get_mut(),
&original_indent_columns,
@@ -1472,14 +1461,12 @@ impl MultiBuffer {
mut new_text,
mut is_insertion,
original_indent_column,
- excerpt_id,
}) = edits.next()
{
while let Some(BufferEdit {
range: next_range,
is_insertion: next_is_insertion,
new_text: next_new_text,
- excerpt_id: next_excerpt_id,
..
}) = edits.peek()
{
@@ -1492,9 +1479,7 @@ impl MultiBuffer {
if should_coalesce {
range.end = cmp::max(next_range.end, range.end);
is_insertion |= *next_is_insertion;
- if excerpt_id == *next_excerpt_id {
- new_text = format!("{new_text}{next_new_text}").into();
- }
+ new_text = format!("{new_text}{next_new_text}").into();
edits.next();
} else {
break;
@@ -1542,10 +1527,7 @@ impl MultiBuffer {
})
}
- cx.emit(Event::ExcerptsEdited {
- excerpt_ids: edited_excerpt_ids,
- buffer_ids,
- });
+ cx.emit(Event::BuffersEdited { buffer_ids });
}
}
@@ -1553,9 +1535,8 @@ impl MultiBuffer {
edits: Vec<(Range<MultiBufferOffset>, Arc<str>)>,
snapshot: &MultiBufferSnapshot,
original_indent_columns: &[Option<u32>],
- ) -> (HashMap<BufferId, Vec<BufferEdit>>, Vec<ExcerptId>) {
+ ) -> HashMap<BufferId, Vec<BufferEdit>> {
let mut buffer_edits: HashMap<BufferId, Vec<BufferEdit>> = Default::default();
- let mut edited_excerpt_ids = Vec::new();
let mut cursor = snapshot.cursor::<MultiBufferOffset, BufferOffset>();
for (ix, (range, new_text)) in edits.into_iter().enumerate() {
let original_indent_column = original_indent_columns.get(ix).copied().flatten();
@@ -1600,11 +1581,10 @@ impl MultiBuffer {
let buffer_end =
(end_region.buffer_range.start + end_overshoot).min(end_region.buffer_range.end);
- if start_region.excerpt.id == end_region.excerpt.id {
+ if start_region.excerpt == end_region.excerpt {
if start_region.buffer.capability == Capability::ReadWrite
&& start_region.is_main_buffer
{
- edited_excerpt_ids.push(start_region.excerpt.id);
buffer_edits
.entry(start_region.buffer.remote_id())
.or_default()
@@ -1613,7 +1593,6 @@ impl MultiBuffer {
new_text,
is_insertion: true,
original_indent_column,
- excerpt_id: start_region.excerpt.id,
});
}
} else {
@@ -1622,7 +1601,6 @@ impl MultiBuffer {
if start_region.buffer.capability == Capability::ReadWrite
&& start_region.is_main_buffer
{
- edited_excerpt_ids.push(start_region.excerpt.id);
buffer_edits
.entry(start_region.buffer.remote_id())
.or_default()
@@ -1631,14 +1609,11 @@ impl MultiBuffer {
new_text: new_text.clone(),
is_insertion: true,
original_indent_column,
- excerpt_id: start_region.excerpt.id,
});
}
- let excerpt_id = end_region.excerpt.id;
if end_region.buffer.capability == Capability::ReadWrite
&& end_region.is_main_buffer
{
- edited_excerpt_ids.push(excerpt_id);
buffer_edits
.entry(end_region.buffer.remote_id())
.or_default()
@@ -1647,18 +1622,17 @@ impl MultiBuffer {
new_text: new_text.clone(),
is_insertion: false,
original_indent_column,
- excerpt_id,
});
}
+ let end_region_excerpt = end_region.excerpt.clone();
cursor.seek(&range.start);
cursor.next_excerpt();
while let Some(region) = cursor.region() {
- if region.excerpt.id == excerpt_id {
+ if region.excerpt == &end_region_excerpt {
break;
}
if region.buffer.capability == Capability::ReadWrite && region.is_main_buffer {
- edited_excerpt_ids.push(region.excerpt.id);
buffer_edits
.entry(region.buffer.remote_id())
.or_default()
@@ -1667,14 +1641,13 @@ impl MultiBuffer {
new_text: new_text.clone(),
is_insertion: false,
original_indent_column,
- excerpt_id: region.excerpt.id,
});
}
cursor.next_excerpt();
}
}
}
- (buffer_edits, edited_excerpt_ids)
+ buffer_edits
}
pub fn autoindent_ranges<I, S>(&mut self, ranges: I, cx: &mut Context<Self>)
@@ -1706,7 +1679,7 @@ impl MultiBuffer {
edits: Vec<(Range<MultiBufferOffset>, Arc<str>)>,
cx: &mut Context<MultiBuffer>,
) {
- let (buffer_edits, edited_excerpt_ids) =
+ let buffer_edits =
MultiBuffer::convert_edits_to_buffer_edits(edits, this.snapshot.get_mut(), &[]);
let mut buffer_ids = Vec::new();
@@ -1730,10 +1703,7 @@ impl MultiBuffer {
})
}
- cx.emit(Event::ExcerptsEdited {
- excerpt_ids: edited_excerpt_ids,
- buffer_ids,
- });
+ cx.emit(Event::BuffersEdited { buffer_ids });
}
}
@@ -1744,38 +1714,25 @@ impl MultiBuffer {
cursor_shape: CursorShape,
cx: &mut Context<Self>,
) {
+ let snapshot = self.snapshot(cx);
let mut selections_by_buffer: HashMap<BufferId, Vec<Selection<text::Anchor>>> =
Default::default();
- let snapshot = self.read(cx);
- let mut cursor = snapshot.excerpts.cursor::<Option<&Locator>>(());
- for selection in selections {
- let start_locator = snapshot.excerpt_locator_for_id(selection.start.excerpt_id);
- let end_locator = snapshot.excerpt_locator_for_id(selection.end.excerpt_id);
- cursor.seek(&Some(start_locator), Bias::Left);
- while let Some(excerpt) = cursor.item()
- && excerpt.locator <= *end_locator
+ for selection in selections {
+ for (buffer_snapshot, buffer_range, _) in
+ snapshot.range_to_buffer_ranges(selection.start..selection.end)
{
- let mut start = excerpt.range.context.start;
- let mut end = excerpt.range.context.end;
- if excerpt.id == selection.start.excerpt_id {
- start = selection.start.text_anchor;
- }
- if excerpt.id == selection.end.excerpt_id {
- end = selection.end.text_anchor;
- }
selections_by_buffer
- .entry(excerpt.buffer_id)
+ .entry(buffer_snapshot.remote_id())
.or_default()
.push(Selection {
id: selection.id,
- start,
- end,
+ start: buffer_snapshot
+ .anchor_at(buffer_range.start, selection.start.bias()),
+ end: buffer_snapshot.anchor_at(buffer_range.end, selection.end.bias()),
reversed: selection.reversed,
goal: selection.goal,
});
-
- cursor.next();
}
}
@@ -1787,25 +1744,9 @@ impl MultiBuffer {
}
}
- for (buffer_id, mut selections) in selections_by_buffer {
+ for (buffer_id, selections) in selections_by_buffer {
self.buffers[&buffer_id].buffer.update(cx, |buffer, cx| {
- selections.sort_unstable_by(|a, b| a.start.cmp(&b.start, buffer));
- let mut selections = selections.into_iter().peekable();
- let merged_selections = Arc::from_iter(iter::from_fn(|| {
- let mut selection = selections.next()?;
- while let Some(next_selection) = selections.peek() {
- if selection.end.cmp(&next_selection.start, buffer).is_ge() {
- let next_selection = selections.next().unwrap();
- if next_selection.end.cmp(&selection.end, buffer).is_ge() {
- selection.end = next_selection.end;
- }
- } else {
- break;
- }
- }
- Some(selection)
- }));
- buffer.set_active_selections(merged_selections, line_mode, cursor_shape, cx);
+ buffer.set_active_selections(selections.into(), line_mode, cursor_shape, cx);
});
}
}
@@ -1821,200 +1762,31 @@ impl MultiBuffer {
#[instrument(skip_all)]
fn merge_excerpt_ranges<'a>(
expanded_ranges: impl IntoIterator<Item = &'a ExcerptRange<Point>> + 'a,
- ) -> (Vec<ExcerptRange<Point>>, Vec<usize>) {
+ ) -> Vec<ExcerptRange<Point>> {
+ let mut sorted: Vec<_> = expanded_ranges.into_iter().collect();
+ sorted.sort_by_key(|range| range.context.start);
let mut merged_ranges: Vec<ExcerptRange<Point>> = Vec::new();
- let mut counts: Vec<usize> = Vec::new();
- for range in expanded_ranges {
+ for range in sorted {
if let Some(last_range) = merged_ranges.last_mut() {
- assert!(
- last_range.context.start <= range.context.start,
- "ranges must be sorted: {last_range:?} <= {range:?}"
- );
if last_range.context.end >= range.context.start
|| last_range.context.end.row + 1 == range.context.start.row
{
last_range.context.end = range.context.end.max(last_range.context.end);
- *counts.last_mut().unwrap() += 1;
continue;
}
}
merged_ranges.push(range.clone());
- counts.push(1);
- }
- (merged_ranges, counts)
- }
-
- pub fn insert_excerpts_after<O>(
- &mut self,
- prev_excerpt_id: ExcerptId,
- buffer: Entity<Buffer>,
- ranges: impl IntoIterator<Item = ExcerptRange<O>>,
- cx: &mut Context<Self>,
- ) -> Vec<ExcerptId>
- where
- O: text::ToOffset,
- {
- let mut ids = Vec::new();
- let mut next_excerpt_id =
- if let Some(last_entry) = self.snapshot.borrow().excerpt_ids.last() {
- last_entry.id.0 + 1
- } else {
- 1
- };
- self.insert_excerpts_with_ids_after(
- prev_excerpt_id,
- buffer,
- ranges.into_iter().map(|range| {
- let id = ExcerptId(post_inc(&mut next_excerpt_id));
- ids.push(id);
- (id, range)
- }),
- cx,
- );
- ids
- }
-
- pub fn insert_excerpts_with_ids_after<O>(
- &mut self,
- prev_excerpt_id: ExcerptId,
- buffer: Entity<Buffer>,
- ranges: impl IntoIterator<Item = (ExcerptId, ExcerptRange<O>)>,
- cx: &mut Context<Self>,
- ) where
- O: text::ToOffset,
- {
- assert_eq!(self.history.transaction_depth(), 0);
- let mut ranges = ranges.into_iter().peekable();
- if ranges.peek().is_none() {
- return Default::default();
- }
-
- self.sync_mut(cx);
-
- let buffer_snapshot = buffer.read(cx).snapshot();
- let buffer_id = buffer_snapshot.remote_id();
-
- let buffer_state = self.buffers.entry(buffer_id).or_insert_with(|| {
- self.buffer_changed_since_sync.replace(true);
- buffer.update(cx, |buffer, _| {
- buffer.record_changes(Rc::downgrade(&self.buffer_changed_since_sync));
- });
- BufferState {
- last_version: RefCell::new(buffer_snapshot.version().clone()),
- last_non_text_state_update_count: Cell::new(
- buffer_snapshot.non_text_state_update_count(),
- ),
- excerpts: Default::default(),
- _subscriptions: [
- cx.observe(&buffer, |_, _, cx| cx.notify()),
- cx.subscribe(&buffer, Self::on_buffer_event),
- ],
- buffer: buffer.clone(),
- }
- });
-
- let mut snapshot = self.snapshot.get_mut();
-
- let mut prev_locator = snapshot.excerpt_locator_for_id(prev_excerpt_id).clone();
- let mut new_excerpt_ids = mem::take(&mut snapshot.excerpt_ids);
- let mut cursor = snapshot.excerpts.cursor::<Option<&Locator>>(());
- let mut new_excerpts = cursor.slice(&prev_locator, Bias::Right);
- prev_locator = cursor.start().unwrap_or(Locator::min_ref()).clone();
-
- let edit_start = ExcerptDimension(new_excerpts.summary().text.len);
- new_excerpts.update_last(
- |excerpt| {
- excerpt.has_trailing_newline = true;
- },
- (),
- );
-
- let next_locator = if let Some(excerpt) = cursor.item() {
- excerpt.locator.clone()
- } else {
- Locator::max()
- };
-
- let mut excerpts = Vec::new();
- let buffer_snapshot = Arc::new(buffer_snapshot);
- while let Some((id, range)) = ranges.next() {
- let locator = Locator::between(&prev_locator, &next_locator);
- if let Err(ix) = buffer_state.excerpts.binary_search(&locator) {
- buffer_state.excerpts.insert(ix, locator.clone());
- }
- let range = ExcerptRange {
- context: buffer_snapshot.anchor_before(&range.context.start)
- ..buffer_snapshot.anchor_after(&range.context.end),
- primary: buffer_snapshot.anchor_before(&range.primary.start)
- ..buffer_snapshot.anchor_after(&range.primary.end),
- };
- excerpts.push((id, range.clone()));
- let excerpt = Excerpt::new(
- id,
- locator.clone(),
- buffer_id,
- buffer_snapshot.clone(),
- range,
- ranges.peek().is_some() || cursor.item().is_some(),
- );
- new_excerpts.push(excerpt, ());
- prev_locator = locator.clone();
-
- if let Some(last_mapping_entry) = new_excerpt_ids.last() {
- assert!(id > last_mapping_entry.id, "excerpt ids must be increasing");
- }
- new_excerpt_ids.push(ExcerptIdMapping { id, locator }, ());
- }
- snapshot
- .buffer_locators
- .insert(buffer_id, buffer_state.excerpts.iter().cloned().collect());
-
- let edit_end = ExcerptDimension(new_excerpts.summary().text.len);
-
- let suffix = cursor.suffix();
- let changed_trailing_excerpt = suffix.is_empty();
- new_excerpts.append(suffix, ());
- drop(cursor);
- snapshot.excerpts = new_excerpts;
- snapshot.excerpt_ids = new_excerpt_ids;
- if changed_trailing_excerpt {
- snapshot.trailing_excerpt_update_count += 1;
- }
-
- let edits = Self::sync_diff_transforms(
- &mut snapshot,
- vec![Edit {
- old: edit_start..edit_start,
- new: edit_start..edit_end,
- }],
- DiffChangeKind::BufferEdited,
- );
- if !edits.is_empty() {
- self.subscriptions.publish(edits);
}
-
- cx.emit(Event::Edited {
- edited_buffer: None,
- is_local: true,
- });
- cx.emit(Event::ExcerptsAdded {
- buffer,
- predecessor: prev_excerpt_id,
- excerpts,
- });
- cx.notify();
+ merged_ranges
}
pub fn clear(&mut self, cx: &mut Context<Self>) {
self.sync_mut(cx);
- let ids = self.excerpt_ids();
let removed_buffer_ids = std::mem::take(&mut self.buffers).into_keys().collect();
- self.excerpts_by_path.clear();
- self.paths_by_excerpt.clear();
+ self.diffs.clear();
let MultiBufferSnapshot {
excerpts,
- buffer_locators,
- diffs: _,
+ diffs,
diff_transforms: _,
non_text_state_update_count: _,
edit_count: _,
@@ -2023,27 +1795,25 @@ impl MultiBuffer {
has_conflict,
has_inverted_diff,
singleton: _,
- excerpt_ids: _,
- replaced_excerpts,
trailing_excerpt_update_count,
all_diff_hunks_expanded: _,
show_deleted_hunks: _,
use_extended_diff_range: _,
show_headers: _,
+ path_keys_by_index: _,
+ indices_by_path_key: _,
+ buffers,
} = self.snapshot.get_mut();
- buffer_locators.clear();
let start = ExcerptDimension(MultiBufferOffset::ZERO);
let prev_len = ExcerptDimension(excerpts.summary().text.len);
*excerpts = Default::default();
+ *buffers = Default::default();
+ *diffs = Default::default();
*trailing_excerpt_update_count += 1;
*is_dirty = false;
*has_deleted_file = false;
*has_conflict = false;
*has_inverted_diff = false;
- match Arc::get_mut(replaced_excerpts) {
- Some(replaced_excerpts) => replaced_excerpts.clear(),
- None => *replaced_excerpts = Default::default(),
- }
let edits = Self::sync_diff_transforms(
self.snapshot.get_mut(),
@@ -2060,120 +1830,10 @@ impl MultiBuffer {
edited_buffer: None,
is_local: true,
});
- cx.emit(Event::ExcerptsRemoved {
- ids,
- removed_buffer_ids,
- });
+ cx.emit(Event::BuffersRemoved { removed_buffer_ids });
cx.notify();
}
- #[ztracing::instrument(skip_all)]
- pub fn excerpts_for_buffer(
- &self,
- buffer_id: BufferId,
- cx: &App,
- ) -> Vec<(ExcerptId, Arc<BufferSnapshot>, ExcerptRange<text::Anchor>)> {
- let mut excerpts = Vec::new();
- let snapshot = self.read(cx);
- let mut cursor = snapshot.excerpts.cursor::<Option<&Locator>>(());
- if let Some(locators) = snapshot.buffer_locators.get(&buffer_id) {
- for locator in &**locators {
- cursor.seek_forward(&Some(locator), Bias::Left);
- if let Some(excerpt) = cursor.item()
- && excerpt.locator == *locator
- {
- excerpts.push((excerpt.id, excerpt.buffer.clone(), excerpt.range.clone()));
- }
- }
- }
-
- excerpts
- }
-
- pub fn excerpt_ranges_for_buffer(&self, buffer_id: BufferId, cx: &App) -> Vec<Range<Point>> {
- let snapshot = self.read(cx);
- let mut excerpts = snapshot
- .excerpts
- .cursor::<Dimensions<Option<&Locator>, ExcerptPoint>>(());
- let mut diff_transforms = snapshot
- .diff_transforms
- .cursor::<Dimensions<ExcerptPoint, OutputDimension<Point>>>(());
- diff_transforms.next();
- let locators = snapshot
- .buffer_locators
- .get(&buffer_id)
- .into_iter()
- .flat_map(|v| &**v);
- let mut result = Vec::new();
- for locator in locators {
- excerpts.seek_forward(&Some(locator), Bias::Left);
- if let Some(excerpt) = excerpts.item()
- && excerpt.locator == *locator
- {
- let excerpt_start = excerpts.start().1;
- let excerpt_end = excerpt_start + excerpt.text_summary.lines;
-
- diff_transforms.seek_forward(&excerpt_start, Bias::Left);
- let overshoot = excerpt_start - diff_transforms.start().0;
- let start = diff_transforms.start().1 + overshoot;
-
- diff_transforms.seek_forward(&excerpt_end, Bias::Right);
- let overshoot = excerpt_end - diff_transforms.start().0;
- let end = diff_transforms.start().1 + overshoot;
-
- result.push(start.0..end.0)
- }
- }
- result
- }
-
- pub fn excerpt_buffer_ids(&self) -> Vec<BufferId> {
- self.snapshot
- .borrow()
- .excerpts
- .iter()
- .map(|entry| entry.buffer_id)
- .collect()
- }
-
- pub fn excerpt_ids(&self) -> Vec<ExcerptId> {
- let snapshot = self.snapshot.borrow();
- let mut ids = Vec::with_capacity(snapshot.excerpts.summary().count);
- ids.extend(snapshot.excerpts.iter().map(|entry| entry.id));
- ids
- }
-
- pub fn excerpt_containing(
- &self,
- position: impl ToOffset,
- cx: &App,
- ) -> Option<(ExcerptId, Entity<Buffer>, Range<text::Anchor>)> {
- let snapshot = self.read(cx);
- let offset = position.to_offset(&snapshot);
-
- let mut cursor = snapshot.cursor::<MultiBufferOffset, BufferOffset>();
- cursor.seek(&offset);
- cursor
- .excerpt()
- .or_else(|| snapshot.excerpts.last())
- .map(|excerpt| {
- (
- excerpt.id,
- self.buffers.get(&excerpt.buffer_id).unwrap().buffer.clone(),
- excerpt.range.context.clone(),
- )
- })
- }
-
- pub fn buffer_for_anchor(&self, anchor: Anchor, cx: &App) -> Option<Entity<Buffer>> {
- if let Some(buffer_id) = anchor.text_anchor.buffer_id {
- self.buffer(buffer_id)
- } else {
- let (_, buffer, _) = self.excerpt_containing(anchor, cx)?;
- Some(buffer)
- }
- }
-
// If point is at the end of the buffer, the last excerpt is returned
pub fn point_to_buffer_offset<T: ToOffset>(
&self,
@@ -77,22 +77,19 @@ fn test_buffer_point_to_anchor_at_end_of_singleton_buffer(cx: &mut App) {
let buffer = cx.new(|cx| Buffer::local("abc", cx));
let multibuffer = cx.new(|cx| MultiBuffer::singleton(buffer.clone(), cx));
- let excerpt_id = multibuffer
+ let anchor = multibuffer
.read(cx)
- .excerpt_ids()
- .into_iter()
- .next()
+ .buffer_point_to_anchor(&buffer, Point::new(0, 3), cx)
.unwrap();
- let anchor = multibuffer
+ let (anchor, _) = multibuffer
.read(cx)
- .buffer_point_to_anchor(&buffer, Point::new(0, 3), cx);
+ .snapshot(cx)
+ .anchor_to_buffer_anchor(anchor)
+ .unwrap();
assert_eq!(
anchor,
- Some(Anchor::in_buffer(
- excerpt_id,
- buffer.read(cx).snapshot().anchor_after(Point::new(0, 3)),
- ))
+ buffer.read(cx).snapshot().anchor_after(Point::new(0, 3)),
);
}
@@ -346,7 +343,7 @@ fn test_excerpt_boundaries_and_clipping(cx: &mut App) {
);
let snapshot = multibuffer.update(cx, |multibuffer, cx| {
- multibuffer.remove_excerpts_for_path(PathKey::sorted(1), cx);
+ multibuffer.remove_excerpts(PathKey::sorted(1), cx);
multibuffer.snapshot(cx)
});
@@ -373,7 +370,7 @@ fn test_excerpt_boundaries_and_clipping(cx: &mut App) {
boundary.row,
boundary
.next
- .buffer
+ .buffer(snapshot)
.text_for_range(boundary.next.range.context)
.collect::<String>(),
starts_new_buffer,
@@ -440,7 +437,7 @@ async fn test_diff_hunks_in_range(cx: &mut TestAppContext) {
multibuffer.update(cx, |multibuffer, cx| {
multibuffer.add_diff(diff, cx);
- multibuffer.expand_diff_hunks(vec![Anchor::min()..Anchor::max()], cx);
+ multibuffer.expand_diff_hunks(vec![Anchor::Min..Anchor::Max], cx);
});
assert_new_snapshot(
@@ -480,7 +477,7 @@ async fn test_diff_hunks_in_range(cx: &mut TestAppContext) {
);
multibuffer.update(cx, |multibuffer, cx| {
- multibuffer.collapse_diff_hunks(vec![Anchor::min()..Anchor::max()], cx);
+ multibuffer.collapse_diff_hunks(vec![Anchor::Min..Anchor::Max], cx);
});
assert_new_snapshot(
@@ -521,7 +518,7 @@ async fn test_diff_hunks_in_range_query_starting_at_added_row(cx: &mut TestAppCo
multibuffer.update(cx, |multibuffer, cx| {
multibuffer.add_diff(diff, cx);
- multibuffer.expand_diff_hunks(vec![Anchor::min()..Anchor::max()], cx);
+ multibuffer.expand_diff_hunks(vec![Anchor::Min..Anchor::Max], cx);
});
assert_new_snapshot(
@@ -766,12 +763,27 @@ fn test_excerpt_events(cx: &mut App) {
cx.subscribe(
&leader_multibuffer,
move |follower, _, event, cx| match event.clone() {
- Event::ExcerptsAdded {
+ Event::BufferRangesUpdated {
buffer,
- predecessor,
- excerpts,
- } => follower.insert_excerpts_with_ids_after(predecessor, buffer, excerpts, cx),
- Event::ExcerptsRemoved { ids, .. } => follower.remove_excerpts(ids, cx),
+ path_key,
+ ranges,
+ } => {
+ let buffer_snapshot = buffer.read(cx).snapshot();
+ follower.set_merged_excerpt_ranges_for_path(
+ path_key,
+ buffer,
+ &buffer_snapshot,
+ ranges,
+ cx,
+ );
+ }
+ Event::BuffersRemoved {
+ removed_buffer_ids, ..
+ } => {
+ for id in removed_buffer_ids {
+ follower.remove_excerpts_for_buffer(id, cx);
+ }
+ }
Event::Edited { .. } => {
*follower_edit_event_count.write() += 1;
}
@@ -885,9 +897,14 @@ fn test_expand_excerpts(cx: &mut App) {
drop(snapshot);
multibuffer.update(cx, |multibuffer, cx| {
- let line_zero = multibuffer.snapshot(cx).anchor_before(Point::new(0, 0));
+ let multibuffer_snapshot = multibuffer.snapshot(cx);
+ let line_zero = multibuffer_snapshot.anchor_before(Point::new(0, 0));
multibuffer.expand_excerpts(
- multibuffer.excerpt_ids(),
+ multibuffer.snapshot(cx).excerpts().map(|excerpt| {
+ multibuffer_snapshot
+ .anchor_in_excerpt(excerpt.context.start)
+ .unwrap()
+ }),
1,
ExpandExcerptDirection::UpAndDown,
cx,
@@ -1184,16 +1201,10 @@ fn test_multibuffer_anchors(cx: &mut App) {
.to_offset(&old_snapshot),
MultiBufferOffset(0)
);
- assert_eq!(Anchor::min().to_offset(&old_snapshot), MultiBufferOffset(0));
- assert_eq!(Anchor::min().to_offset(&old_snapshot), MultiBufferOffset(0));
- assert_eq!(
- Anchor::max().to_offset(&old_snapshot),
- MultiBufferOffset(10)
- );
- assert_eq!(
- Anchor::max().to_offset(&old_snapshot),
- MultiBufferOffset(10)
- );
+ assert_eq!(Anchor::Min.to_offset(&old_snapshot), MultiBufferOffset(0));
+ assert_eq!(Anchor::Min.to_offset(&old_snapshot), MultiBufferOffset(0));
+ assert_eq!(Anchor::Max.to_offset(&old_snapshot), MultiBufferOffset(10));
+ assert_eq!(Anchor::Max.to_offset(&old_snapshot), MultiBufferOffset(10));
buffer_1.update(cx, |buffer, cx| {
buffer.edit([(0..0, "W")], None, cx);
@@ -1270,153 +1281,6 @@ fn test_multibuffer_anchors(cx: &mut App) {
);
}
-#[gpui::test]
-fn test_resolving_anchors_after_replacing_their_excerpts(cx: &mut App) {
- let buffer_1 = cx.new(|cx| Buffer::local("abcd", cx));
- let buffer_2 = cx.new(|cx| Buffer::local("ABCDEFGHIJKLMNOP", cx));
- let multibuffer = cx.new(|_| MultiBuffer::new(Capability::ReadWrite));
-
- // Create an insertion id in buffer 1 that doesn't exist in buffer 2.
- // Add an excerpt from buffer 1 that spans this new insertion.
- buffer_1.update(cx, |buffer, cx| buffer.edit([(4..4, "123")], None, cx));
- let excerpt_id_1 = multibuffer.update(cx, |multibuffer, cx| {
- let buffer_1_snapshot = buffer_1.read(cx).snapshot();
- multibuffer.set_excerpt_ranges_for_path(
- PathKey::sorted(0),
- buffer_1,
- &buffer_1_snapshot,
- vec![ExcerptRange::new((0..7).to_point(&buffer_1_snapshot))],
- cx,
- );
- multibuffer.excerpt_ids().into_iter().next().unwrap()
- });
-
- let snapshot_1 = multibuffer.read(cx).snapshot(cx);
- assert_eq!(snapshot_1.text(), "abcd123");
-
- // Replace the buffer 1 excerpt with new excerpts from buffer 2.
- let (excerpt_id_2, _excerpt_id_3) = multibuffer.update(cx, |multibuffer, cx| {
- multibuffer.remove_excerpts_for_path(PathKey::sorted(0), cx);
- let snapshot_2 = buffer_2.read(cx).snapshot();
- multibuffer.set_excerpt_ranges_for_path(
- PathKey::sorted(1),
- buffer_2.clone(),
- &buffer_2.read(cx).snapshot(),
- vec![
- ExcerptRange::new((0..4).to_point(&snapshot_2)),
- ExcerptRange::new((6..10).to_point(&snapshot_2)),
- ExcerptRange::new((12..16).to_point(&snapshot_2)),
- ],
- cx,
- );
- let mut ids = multibuffer
- .excerpts_for_buffer(buffer_2.read(cx).remote_id(), cx)
- .into_iter()
- .map(|(id, _, _)| id);
- (ids.next().unwrap(), ids.next().unwrap())
- });
- let snapshot_2 = multibuffer.read(cx).snapshot(cx);
- assert_eq!(snapshot_2.text(), "ABCD\nGHIJ\nMNOP");
-
- // The old excerpt id doesn't get reused.
- assert_ne!(excerpt_id_2, excerpt_id_1);
-
- // Resolve some anchors from the previous snapshot in the new snapshot.
- // The current excerpts are from a different buffer, so we don't attempt to
- // resolve the old text anchor in the new buffer.
- assert_eq!(
- snapshot_2.summary_for_anchor::<MultiBufferOffset>(
- &snapshot_1.anchor_before(MultiBufferOffset(2))
- ),
- MultiBufferOffset(0)
- );
- assert_eq!(
- snapshot_2.summaries_for_anchors::<MultiBufferOffset, _>(&[
- snapshot_1.anchor_before(MultiBufferOffset(2)),
- snapshot_1.anchor_after(MultiBufferOffset(3))
- ]),
- vec![MultiBufferOffset(0), MultiBufferOffset(0)]
- );
-
- // Refresh anchors from the old snapshot. The return value indicates that both
- // anchors lost their original excerpt.
- let refresh = snapshot_2.refresh_anchors(&[
- snapshot_1.anchor_before(MultiBufferOffset(2)),
- snapshot_1.anchor_after(MultiBufferOffset(3)),
- ]);
- assert_eq!(
- refresh,
- &[
- (0, snapshot_2.anchor_before(MultiBufferOffset(0)), false),
- (1, snapshot_2.anchor_after(MultiBufferOffset(0)), false),
- ]
- );
-
- // Replace the middle excerpt with a smaller excerpt in buffer 2,
- // that intersects the old excerpt.
- multibuffer.update(cx, |multibuffer, cx| {
- let snapshot_2 = buffer_2.read(cx).snapshot();
- multibuffer.set_excerpt_ranges_for_path(
- PathKey::sorted(1),
- buffer_2.clone(),
- &buffer_2.read(cx).snapshot(),
- vec![
- ExcerptRange::new((0..4).to_point(&snapshot_2)),
- ExcerptRange::new((12..16).to_point(&snapshot_2)),
- ],
- cx,
- );
- multibuffer.set_excerpt_ranges_for_path(
- PathKey::sorted(1),
- buffer_2.clone(),
- &buffer_2.read(cx).snapshot(),
- vec![
- ExcerptRange::new((0..4).to_point(&snapshot_2)),
- ExcerptRange::new((5..8).to_point(&snapshot_2)),
- ExcerptRange::new((12..16).to_point(&snapshot_2)),
- ],
- cx,
- );
- });
-
- let snapshot_3 = multibuffer.read(cx).snapshot(cx);
- assert_eq!(snapshot_3.text(), "ABCD\nFGH\nMNOP");
-
- // Resolve some anchors from the previous snapshot in the new snapshot.
- // The third anchor can't be resolved, since its excerpt has been removed,
- // so it resolves to the same position as its predecessor.
- let anchors = [
- snapshot_2.anchor_before(MultiBufferOffset(0)),
- snapshot_2.anchor_after(MultiBufferOffset(2)),
- snapshot_2.anchor_after(MultiBufferOffset(6)),
- snapshot_2.anchor_after(MultiBufferOffset(14)),
- ];
- assert_eq!(
- snapshot_3.summaries_for_anchors::<MultiBufferOffset, _>(&anchors),
- &[
- MultiBufferOffset(0),
- MultiBufferOffset(2),
- MultiBufferOffset(9),
- MultiBufferOffset(13)
- ]
- );
-
- let new_anchors = snapshot_3.refresh_anchors(&anchors);
- assert_eq!(
- new_anchors.iter().map(|a| (a.0, a.2)).collect::<Vec<_>>(),
- &[(0, true), (1, true), (2, true), (3, true)]
- );
- assert_eq!(
- snapshot_3.summaries_for_anchors::<MultiBufferOffset, _>(new_anchors.iter().map(|a| &a.1)),
- &[
- MultiBufferOffset(0),
- MultiBufferOffset(2),
- MultiBufferOffset(7),
- MultiBufferOffset(13)
- ]
- );
-}
-
#[gpui::test]
async fn test_basic_diff_hunks(cx: &mut TestAppContext) {
let text = indoc!(
@@ -1467,7 +1331,7 @@ async fn test_basic_diff_hunks(cx: &mut TestAppContext) {
);
multibuffer.update(cx, |multibuffer, cx| {
- multibuffer.expand_diff_hunks(vec![Anchor::min()..Anchor::max()], cx);
+ multibuffer.expand_diff_hunks(vec![Anchor::Min..Anchor::Max], cx);
});
assert_new_snapshot(
@@ -1513,7 +1377,7 @@ async fn test_basic_diff_hunks(cx: &mut TestAppContext) {
assert_line_indents(&snapshot);
multibuffer.update(cx, |multibuffer, cx| {
- multibuffer.collapse_diff_hunks(vec![Anchor::min()..Anchor::max()], cx)
+ multibuffer.collapse_diff_hunks(vec![Anchor::Min..Anchor::Max], cx)
});
assert_new_snapshot(
&multibuffer,
@@ -1700,7 +1564,7 @@ async fn test_repeatedly_expand_a_diff_hunk(cx: &mut TestAppContext) {
});
multibuffer.update(cx, |multibuffer, cx| {
- multibuffer.expand_diff_hunks(vec![Anchor::min()..Anchor::max()], cx);
+ multibuffer.expand_diff_hunks(vec![Anchor::Min..Anchor::Max], cx);
});
assert_new_snapshot(
@@ -1751,7 +1615,7 @@ async fn test_repeatedly_expand_a_diff_hunk(cx: &mut TestAppContext) {
// Now collapse all diff hunks
multibuffer.update(cx, |multibuffer, cx| {
- multibuffer.collapse_diff_hunks(vec![Anchor::min()..Anchor::max()], cx);
+ multibuffer.collapse_diff_hunks(vec![Anchor::Min..Anchor::Max], cx);
});
assert_new_snapshot(
@@ -2097,6 +1961,203 @@ fn test_set_excerpts_for_buffer(cx: &mut TestAppContext) {
});
}
+#[gpui::test]
+fn test_update_excerpt_ranges_for_path(cx: &mut TestAppContext) {
+ let buffer = cx.new(|cx| {
+ Buffer::local(
+ indoc! {
+ "row 0
+ row 1
+ row 2
+ row 3
+ row 4
+ row 5
+ row 6
+ row 7
+ row 8
+ row 9
+ row 10
+ row 11
+ row 12
+ row 13
+ row 14
+ "},
+ cx,
+ )
+ });
+ let path = PathKey::with_sort_prefix(0, rel_path("test.rs").into_arc());
+
+ let multibuffer = cx.new(|_| MultiBuffer::new(Capability::ReadWrite));
+ multibuffer.update(cx, |multibuffer, cx| {
+ multibuffer.set_excerpts_for_path(
+ path.clone(),
+ buffer.clone(),
+ vec![Point::row_range(2..4), Point::row_range(8..10)],
+ 0,
+ cx,
+ );
+ });
+ assert_excerpts_match(
+ &multibuffer,
+ cx,
+ indoc! {"-----
+ row 2
+ row 3
+ row 4
+ -----
+ row 8
+ row 9
+ row 10
+ "},
+ );
+
+ multibuffer.update(cx, |multibuffer, cx| {
+ multibuffer.update_excerpts_for_path(
+ path.clone(),
+ buffer.clone(),
+ vec![Point::row_range(12..13)],
+ 0,
+ cx,
+ );
+ });
+ assert_excerpts_match(
+ &multibuffer,
+ cx,
+ indoc! {"-----
+ row 12
+ row 13
+ "},
+ );
+
+ multibuffer.update(cx, |multibuffer, cx| {
+ multibuffer.set_excerpts_for_path(
+ path.clone(),
+ buffer.clone(),
+ vec![Point::row_range(2..4)],
+ 0,
+ cx,
+ );
+ });
+ assert_excerpts_match(
+ &multibuffer,
+ cx,
+ indoc! {"-----
+ row 2
+ row 3
+ row 4
+ "},
+ );
+ multibuffer.update(cx, |multibuffer, cx| {
+ multibuffer.update_excerpts_for_path(
+ path.clone(),
+ buffer.clone(),
+ vec![Point::row_range(3..5)],
+ 0,
+ cx,
+ );
+ });
+ assert_excerpts_match(
+ &multibuffer,
+ cx,
+ indoc! {"-----
+ row 2
+ row 3
+ row 4
+ row 5
+ "},
+ );
+
+ multibuffer.update(cx, |multibuffer, cx| {
+ multibuffer.set_excerpts_for_path(
+ path.clone(),
+ buffer.clone(),
+ vec![
+ Point::row_range(0..1),
+ Point::row_range(6..8),
+ Point::row_range(12..13),
+ ],
+ 0,
+ cx,
+ );
+ });
+ assert_excerpts_match(
+ &multibuffer,
+ cx,
+ indoc! {"-----
+ row 0
+ row 1
+ -----
+ row 6
+ row 7
+ row 8
+ -----
+ row 12
+ row 13
+ "},
+ );
+ multibuffer.update(cx, |multibuffer, cx| {
+ multibuffer.update_excerpts_for_path(
+ path.clone(),
+ buffer.clone(),
+ vec![Point::row_range(7..9)],
+ 0,
+ cx,
+ );
+ });
+ assert_excerpts_match(
+ &multibuffer,
+ cx,
+ indoc! {"-----
+ row 6
+ row 7
+ row 8
+ row 9
+ "},
+ );
+
+ multibuffer.update(cx, |multibuffer, cx| {
+ multibuffer.set_excerpts_for_path(
+ path.clone(),
+ buffer.clone(),
+ vec![Point::row_range(2..3), Point::row_range(6..7)],
+ 0,
+ cx,
+ );
+ });
+ assert_excerpts_match(
+ &multibuffer,
+ cx,
+ indoc! {"-----
+ row 2
+ row 3
+ -----
+ row 6
+ row 7
+ "},
+ );
+ multibuffer.update(cx, |multibuffer, cx| {
+ multibuffer.update_excerpts_for_path(
+ path.clone(),
+ buffer.clone(),
+ vec![Point::row_range(3..6)],
+ 0,
+ cx,
+ );
+ });
+ assert_excerpts_match(
+ &multibuffer,
+ cx,
+ indoc! {"-----
+ row 2
+ row 3
+ row 4
+ row 5
+ row 6
+ row 7
+ "},
+ );
+}
+
#[gpui::test]
fn test_set_excerpts_for_buffer_rename(cx: &mut TestAppContext) {
let buf1 = cx.new(|cx| {
@@ -2179,19 +2240,418 @@ fn test_set_excerpts_for_buffer_rename(cx: &mut TestAppContext) {
}
#[gpui::test]
-async fn test_diff_hunks_with_multiple_excerpts(cx: &mut TestAppContext) {
- let base_text_1 = indoc!(
- "
- one
- two
+fn test_set_excerpts_for_path_replaces_previous_buffer(cx: &mut TestAppContext) {
+ let buffer_a = cx.new(|cx| {
+ Buffer::local(
+ indoc! {
+ "alpha
+ beta
+ gamma
+ delta
+ epsilon
+ ",
+ },
+ cx,
+ )
+ });
+ let buffer_b = cx.new(|cx| {
+ Buffer::local(
+ indoc! {
+ "one
+ two
three
- four
- five
- six
+ four
+ ",
+ },
+ cx,
+ )
+ });
+ let path: PathKey = PathKey::with_sort_prefix(0, rel_path("shared/path").into_arc());
+
+ let multibuffer = cx.new(|_| MultiBuffer::new(Capability::ReadWrite));
+ let removed_buffer_ids: Arc<RwLock<Vec<BufferId>>> = Default::default();
+ multibuffer.update(cx, |_, cx| {
+ let removed_buffer_ids = removed_buffer_ids.clone();
+ cx.subscribe(&multibuffer, move |_, _, event, _| {
+ if let Event::BuffersRemoved {
+ removed_buffer_ids: ids,
+ } = event
+ {
+ removed_buffer_ids.write().extend(ids.iter().copied());
+ }
+ })
+ .detach();
+ });
+
+ let ranges_a = vec![Point::row_range(0..1), Point::row_range(3..4)];
+ multibuffer.update(cx, |multibuffer, cx| {
+ multibuffer.set_excerpts_for_path(path.clone(), buffer_a.clone(), ranges_a.clone(), 0, cx);
+ });
+ let (anchor_a1, anchor_a2) = multibuffer.read_with(cx, |multibuffer, cx| {
+ let snapshot = multibuffer.snapshot(cx);
+ let buffer_snapshot = buffer_a.read(cx).snapshot();
+ let mut anchors = ranges_a.into_iter().filter_map(|range| {
+ let text_range = buffer_snapshot.anchor_range_inside(range);
+ let start = snapshot.anchor_in_buffer(text_range.start)?;
+ let end = snapshot.anchor_in_buffer(text_range.end)?;
+ Some(start..end)
+ });
+ (
+ anchors.next().expect("should have first anchor"),
+ anchors.next().expect("should have second anchor"),
+ )
+ });
+
+ assert_excerpts_match(
+ &multibuffer,
+ cx,
+ indoc! {
+ "-----
+ alpha
+ beta
+ -----
+ delta
+ epsilon
"
+ },
);
- let text_1 = indoc!(
- "
+
+ let buffer_a_id = buffer_a.read_with(cx, |buffer, _| buffer.remote_id());
+ multibuffer.read_with(cx, |multibuffer, cx| {
+ let snapshot = multibuffer.snapshot(cx);
+ assert!(
+ snapshot
+ .excerpts()
+ .any(|excerpt| excerpt.context.start.buffer_id == buffer_a_id),
+ );
+ });
+
+ let ranges_b = vec![Point::row_range(1..2)];
+ multibuffer.update(cx, |multibuffer, cx| {
+ multibuffer.set_excerpts_for_path(path.clone(), buffer_b.clone(), ranges_b.clone(), 1, cx);
+ });
+ let anchor_b = multibuffer.read_with(cx, |multibuffer, cx| {
+ let snapshot = multibuffer.snapshot(cx);
+ let buffer_snapshot = buffer_b.read(cx).snapshot();
+ ranges_b
+ .into_iter()
+ .filter_map(|range| {
+ let text_range = buffer_snapshot.anchor_range_inside(range);
+ let start = snapshot.anchor_in_buffer(text_range.start)?;
+ let end = snapshot.anchor_in_buffer(text_range.end)?;
+ Some(start..end)
+ })
+ .next()
+ .expect("should have an anchor")
+ });
+
+ let buffer_b_id = buffer_b.read_with(cx, |buffer, _| buffer.remote_id());
+ multibuffer.read_with(cx, |multibuffer, cx| {
+ let snapshot = multibuffer.snapshot(cx);
+ assert!(
+ !snapshot
+ .excerpts()
+ .any(|excerpt| excerpt.context.start.buffer_id == buffer_a_id),
+ );
+ assert!(
+ snapshot
+ .excerpts()
+ .any(|excerpt| excerpt.context.start.buffer_id == buffer_b_id),
+ );
+ assert!(
+ multibuffer.buffer(buffer_a_id).is_none(),
+ "old buffer should be fully removed from the multibuffer"
+ );
+ assert!(
+ multibuffer.buffer(buffer_b_id).is_some(),
+ "new buffer should be present in the multibuffer"
+ );
+ });
+ assert!(
+ removed_buffer_ids.read().contains(&buffer_a_id),
+ "BuffersRemoved event should have been emitted for the old buffer"
+ );
+
+ assert_excerpts_match(
+ &multibuffer,
+ cx,
+ indoc! {
+ "-----
+ one
+ two
+ three
+ four
+ "
+ },
+ );
+
+ multibuffer.read_with(cx, |multibuffer, cx| {
+ let snapshot = multibuffer.snapshot(cx);
+ anchor_a1.start.cmp(&anchor_b.start, &snapshot);
+ anchor_a1.end.cmp(&anchor_b.end, &snapshot);
+ anchor_a1.start.cmp(&anchor_a2.start, &snapshot);
+ anchor_a1.end.cmp(&anchor_a2.end, &snapshot);
+ });
+}
+
+#[gpui::test]
+fn test_stale_anchor_after_buffer_removal_and_path_reuse(cx: &mut TestAppContext) {
+ let buffer_a = cx.new(|cx| Buffer::local("aaa\nbbb\nccc\n", cx));
+ let buffer_b = cx.new(|cx| Buffer::local("xxx\nyyy\nzzz\n", cx));
+ let buffer_other = cx.new(|cx| Buffer::local("111\n222\n333\n", cx));
+ let path = PathKey::with_sort_prefix(0, rel_path("the/path").into_arc());
+ let other_path = PathKey::with_sort_prefix(1, rel_path("other/path").into_arc());
+
+ let multibuffer = cx.new(|_| MultiBuffer::new(Capability::ReadWrite));
+
+ multibuffer.update(cx, |multibuffer, cx| {
+ multibuffer.set_excerpts_for_path(
+ path.clone(),
+ buffer_a.clone(),
+ [Point::new(0, 0)..Point::new(2, 3)],
+ 0,
+ cx,
+ );
+ multibuffer.set_excerpts_for_path(
+ other_path.clone(),
+ buffer_other.clone(),
+ [Point::new(0, 0)..Point::new(2, 3)],
+ 0,
+ cx,
+ );
+ });
+
+ buffer_a.update(cx, |buffer, cx| {
+ buffer.edit(
+ [(Point::new(1, 0)..Point::new(1, 0), "INSERTED ")],
+ None,
+ cx,
+ );
+ });
+
+ let stale_anchor = multibuffer.read_with(cx, |multibuffer, cx| {
+ let snapshot = multibuffer.snapshot(cx);
+ snapshot.anchor_before(Point::new(1, 5))
+ });
+
+ multibuffer.update(cx, |multibuffer, cx| {
+ multibuffer.remove_excerpts(path.clone(), cx);
+ });
+
+ multibuffer.read_with(cx, |multibuffer, cx| {
+ let snapshot = multibuffer.snapshot(cx);
+ let offset = stale_anchor.to_offset(&snapshot);
+ assert!(
+ offset.0 <= snapshot.len().0,
+ "stale anchor resolved to offset {offset:?} but multibuffer len is {:?}",
+ snapshot.len()
+ );
+ });
+
+ multibuffer.update(cx, |multibuffer, cx| {
+ multibuffer.set_excerpts_for_path(
+ path.clone(),
+ buffer_b.clone(),
+ [Point::new(0, 0)..Point::new(2, 3)],
+ 0,
+ cx,
+ );
+ });
+
+ multibuffer.read_with(cx, |multibuffer, cx| {
+ let snapshot = multibuffer.snapshot(cx);
+ let offset = stale_anchor.to_offset(&snapshot);
+ assert!(
+ offset.0 <= snapshot.len().0,
+ "stale anchor resolved to offset {offset:?} but multibuffer len is {:?}",
+ snapshot.len()
+ );
+ });
+}
+
+#[gpui::test]
+async fn test_map_excerpt_ranges(cx: &mut TestAppContext) {
+ let base_text = indoc!(
+ "
+ {
+ (aaa)
+ (bbb)
+ (ccc)
+ }
+ xxx
+ yyy
+ zzz
+ [
+ (ddd)
+ (EEE)
+ ]
+ "
+ );
+ let text = indoc!(
+ "
+ {
+ (aaa)
+ (CCC)
+ }
+ xxx
+ yyy
+ zzz
+ [
+ (ddd)
+ (EEE)
+ ]
+ "
+ );
+
+ let buffer = cx.new(|cx| Buffer::local(text, cx));
+ let diff = cx
+ .new(|cx| BufferDiff::new_with_base_text(base_text, &buffer.read(cx).text_snapshot(), cx));
+ cx.run_until_parked();
+
+ let multibuffer = cx.new(|cx| {
+ let mut multibuffer = MultiBuffer::new(Capability::ReadWrite);
+ multibuffer.set_excerpts_for_path(
+ PathKey::sorted(0),
+ buffer.clone(),
+ [
+ Point::new(0, 0)..Point::new(3, 1),
+ Point::new(7, 0)..Point::new(10, 1),
+ ],
+ 0,
+ cx,
+ );
+ multibuffer.add_diff(diff.clone(), cx);
+ multibuffer
+ });
+
+ multibuffer.update(cx, |multibuffer, cx| {
+ multibuffer.expand_diff_hunks(vec![Anchor::Min..Anchor::Max], cx);
+ });
+ cx.run_until_parked();
+
+ let snapshot = multibuffer.read_with(cx, |multibuffer, cx| multibuffer.snapshot(cx));
+
+ let actual_diff = format_diff(
+ &snapshot.text(),
+ &snapshot.row_infos(MultiBufferRow(0)).collect::<Vec<_>>(),
+ &Default::default(),
+ None,
+ );
+ pretty_assertions::assert_eq!(
+ actual_diff,
+ indoc!(
+ "
+ {
+ (aaa)
+ - (bbb)
+ - (ccc)
+ + (CCC)
+ } [\u{2193}]
+ [ [\u{2191}]
+ (ddd)
+ (EEE)
+ ] [\u{2193}]"
+ )
+ );
+
+ assert_eq!(
+ snapshot.map_excerpt_ranges(
+ snapshot.point_to_offset(Point::new(1, 3))..snapshot.point_to_offset(Point::new(1, 3)),
+ |buffer, excerpt_range, input_range| {
+ assert_eq!(
+ buffer.offset_to_point(input_range.start.0)
+ ..buffer.offset_to_point(input_range.end.0),
+ Point::new(1, 3)..Point::new(1, 3),
+ );
+ assert_eq!(
+ buffer.offset_to_point(excerpt_range.context.start.0)
+ ..buffer.offset_to_point(excerpt_range.context.end.0),
+ Point::new(0, 0)..Point::new(3, 1),
+ );
+ vec![
+ (input_range.start..BufferOffset(input_range.start.0 + 3), ()),
+ (excerpt_range.context, ()),
+ (
+ BufferOffset(text::ToOffset::to_offset(&Point::new(2, 2), buffer))
+ ..BufferOffset(text::ToOffset::to_offset(&Point::new(2, 7), buffer)),
+ (),
+ ),
+ (
+ BufferOffset(text::ToOffset::to_offset(&Point::new(0, 0), buffer))
+ ..BufferOffset(text::ToOffset::to_offset(&Point::new(2, 0), buffer)),
+ (),
+ ),
+ ]
+ },
+ ),
+ Some(vec![
+ (
+ snapshot.point_to_offset(Point::new(1, 3))
+ ..snapshot.point_to_offset(Point::new(1, 6)),
+ (),
+ ),
+ (
+ snapshot.point_to_offset(Point::zero())..snapshot.point_to_offset(Point::new(5, 1)),
+ ()
+ ),
+ (
+ snapshot.point_to_offset(Point::new(4, 2))
+ ..snapshot.point_to_offset(Point::new(4, 7)),
+ (),
+ ),
+ (
+ snapshot.point_to_offset(Point::zero())..snapshot.point_to_offset(Point::new(4, 0)),
+ ()
+ ),
+ ]),
+ );
+
+ assert_eq!(
+ snapshot.map_excerpt_ranges(
+ snapshot.point_to_offset(Point::new(5, 0))..snapshot.point_to_offset(Point::new(7, 0)),
+ |_, _, range| vec![(range, ())],
+ ),
+ None,
+ );
+
+ assert_eq!(
+ snapshot.map_excerpt_ranges(
+ snapshot.point_to_offset(Point::new(7, 3))..snapshot.point_to_offset(Point::new(7, 6)),
+ |buffer, excerpt_range, input_range| {
+ assert_eq!(
+ buffer.offset_to_point(input_range.start.0)
+ ..buffer.offset_to_point(input_range.end.0),
+ Point::new(8, 3)..Point::new(8, 6),
+ );
+ assert_eq!(
+ buffer.offset_to_point(excerpt_range.context.start.0)
+ ..buffer.offset_to_point(excerpt_range.context.end.0),
+ Point::new(7, 0)..Point::new(10, 1),
+ );
+ vec![(input_range, ())]
+ },
+ ),
+ Some(vec![(
+ snapshot.point_to_offset(Point::new(7, 3))..snapshot.point_to_offset(Point::new(7, 6)),
+ (),
+ )]),
+ );
+}
+
+#[gpui::test]
+async fn test_diff_hunks_with_multiple_excerpts(cx: &mut TestAppContext) {
+ let base_text_1 = indoc!(
+ "
+ one
+ two
+ three
+ four
+ five
+ six
+ "
+ );
+ let text_1 = indoc!(
+ "
ZERO
one
TWO
@@ -2273,7 +2733,7 @@ async fn test_diff_hunks_with_multiple_excerpts(cx: &mut TestAppContext) {
);
multibuffer.update(cx, |multibuffer, cx| {
- multibuffer.expand_diff_hunks(vec![Anchor::min()..Anchor::max()], cx);
+ multibuffer.expand_diff_hunks(vec![Anchor::Min..Anchor::Max], cx);
});
assert_new_snapshot(
@@ -2423,101 +2883,137 @@ struct ReferenceMultibuffer {
excerpts: Vec<ReferenceExcerpt>,
diffs: HashMap<BufferId, Entity<BufferDiff>>,
inverted_diffs: HashMap<BufferId, (Entity<BufferDiff>, Entity<language::Buffer>)>,
+ expanded_diff_hunks_by_buffer: HashMap<BufferId, Vec<text::Anchor>>,
}
-#[derive(Debug)]
+#[derive(Clone, Debug)]
struct ReferenceExcerpt {
- id: ExcerptId,
+ path_key: PathKey,
+ path_key_index: PathKeyIndex,
buffer: Entity<Buffer>,
range: Range<text::Anchor>,
- expanded_diff_hunks: Vec<text::Anchor>,
}
-#[derive(Debug)]
+#[derive(Clone, Debug)]
struct ReferenceRegion {
buffer_id: Option<BufferId>,
range: Range<usize>,
- buffer_range: Option<Range<Point>>,
+ buffer_range: Range<Point>,
+ // if this is a deleted hunk, the main buffer anchor to which the deleted content is attached
+ deleted_hunk_anchor: Option<text::Anchor>,
status: Option<DiffHunkStatus>,
- excerpt_id: Option<ExcerptId>,
+ excerpt: Option<ReferenceExcerpt>,
}
impl ReferenceMultibuffer {
- fn expand_excerpts(&mut self, excerpts: &HashSet<ExcerptId>, line_count: u32, cx: &App) {
- if line_count == 0 {
+ fn expand_excerpts(
+ &mut self,
+ excerpts: &HashSet<ExcerptRange<text::Anchor>>,
+ line_count: u32,
+ cx: &mut App,
+ ) {
+ use text::AnchorRangeExt as _;
+
+ if line_count == 0 || excerpts.is_empty() {
return;
}
- for id in excerpts {
- let excerpt = self.excerpts.iter_mut().find(|e| e.id == *id).unwrap();
- let snapshot = excerpt.buffer.read(cx).snapshot();
- let mut point_range = excerpt.range.to_point(&snapshot);
- point_range.start = Point::new(point_range.start.row.saturating_sub(line_count), 0);
- point_range.end =
- snapshot.clip_point(Point::new(point_range.end.row + line_count, 0), Bias::Left);
- point_range.end.column = snapshot.line_len(point_range.end.row);
- excerpt.range =
- snapshot.anchor_before(point_range.start)..snapshot.anchor_after(point_range.end);
+ let mut excerpts_by_buffer: HashMap<BufferId, Vec<ExcerptRange<text::Anchor>>> =
+ HashMap::default();
+ for excerpt in excerpts {
+ excerpts_by_buffer
+ .entry(excerpt.context.start.buffer_id)
+ .or_default()
+ .push(excerpt.clone())
}
- }
- fn remove_excerpt(&mut self, id: ExcerptId, cx: &App) {
- let ix = self
- .excerpts
- .iter()
- .position(|excerpt| excerpt.id == id)
- .unwrap();
- let excerpt = self.excerpts.remove(ix);
- let buffer = excerpt.buffer.read(cx);
- let buffer_id = buffer.remote_id();
- log::info!(
- "Removing excerpt {}: {:?}",
- ix,
- buffer
- .text_for_range(excerpt.range.to_offset(buffer))
- .collect::<String>(),
- );
- if !self
- .excerpts
- .iter()
- .any(|excerpt| excerpt.buffer.read(cx).remote_id() == buffer_id)
- {
- self.diffs.remove(&buffer_id);
- self.inverted_diffs.remove(&buffer_id);
+ for (buffer_id, excerpts_to_expand) in excerpts_by_buffer {
+ let mut buffer = None;
+ let mut buffer_snapshot = None;
+ let mut path = None;
+ let mut path_key_index = None;
+ let mut new_ranges =
+ self.excerpts
+ .iter()
+ .filter(|excerpt| excerpt.range.start.buffer_id == buffer_id)
+ .map(|excerpt| {
+ let snapshot = excerpt.buffer.read(cx).snapshot();
+ let mut range = excerpt.range.to_point(&snapshot);
+ if excerpts_to_expand.iter().any(|info| {
+ excerpt.range.contains_anchor(info.context.start, &snapshot)
+ }) {
+ range.start = Point::new(range.start.row.saturating_sub(line_count), 0);
+ range.end = snapshot
+ .clip_point(Point::new(range.end.row + line_count, 0), Bias::Left);
+ range.end.column = snapshot.line_len(range.end.row);
+ }
+ buffer = Some(excerpt.buffer.clone());
+ buffer_snapshot = Some(snapshot);
+ path = Some(excerpt.path_key.clone());
+ path_key_index = Some(excerpt.path_key_index);
+ ExcerptRange::new(range)
+ })
+ .collect::<Vec<_>>();
+
+ new_ranges.sort_by(|l, r| l.context.start.cmp(&r.context.start));
+
+ self.set_excerpts(
+ path.unwrap(),
+ path_key_index.unwrap(),
+ buffer.unwrap(),
+ &buffer_snapshot.unwrap(),
+ new_ranges,
+ cx,
+ );
}
}
- fn insert_excerpt_after(
+ fn set_excerpts(
&mut self,
- prev_id: ExcerptId,
- new_excerpt_id: ExcerptId,
- (buffer_handle, anchor_range): (Entity<Buffer>, Range<text::Anchor>),
+ path_key: PathKey,
+ path_key_index: PathKeyIndex,
+ buffer: Entity<Buffer>,
+ buffer_snapshot: &BufferSnapshot,
+ ranges: Vec<ExcerptRange<Point>>,
+ cx: &mut App,
) {
- let excerpt_ix = if prev_id == ExcerptId::max() {
- self.excerpts.len()
- } else {
- self.excerpts
- .iter()
- .position(|excerpt| excerpt.id == prev_id)
- .unwrap()
- + 1
- };
- self.excerpts.insert(
- excerpt_ix,
- ReferenceExcerpt {
- id: new_excerpt_id,
- buffer: buffer_handle,
- range: anchor_range,
- expanded_diff_hunks: Vec::new(),
- },
+ self.excerpts.retain(|excerpt| {
+ excerpt.path_key != path_key && excerpt.buffer.entity_id() != buffer.entity_id()
+ });
+
+ let ranges = MultiBuffer::merge_excerpt_ranges(&ranges);
+
+ let (Ok(ix) | Err(ix)) = self
+ .excerpts
+ .binary_search_by(|probe| probe.path_key.cmp(&path_key));
+ self.excerpts.splice(
+ ix..ix,
+ ranges.into_iter().map(|range| ReferenceExcerpt {
+ path_key: path_key.clone(),
+ path_key_index,
+ buffer: buffer.clone(),
+ range: buffer_snapshot.anchor_before(range.context.start)
+ ..buffer_snapshot.anchor_after(range.context.end),
+ }),
);
+ self.update_expanded_diff_hunks_for_buffer(buffer_snapshot.remote_id(), cx);
}
- fn expand_diff_hunks(&mut self, excerpt_id: ExcerptId, range: Range<text::Anchor>, cx: &App) {
+ fn expand_diff_hunks(&mut self, path_key: PathKey, range: Range<text::Anchor>, cx: &App) {
let excerpt = self
.excerpts
.iter_mut()
- .find(|e| e.id == excerpt_id)
+ .find(|e| {
+ e.path_key == path_key
+ && e.range
+ .start
+ .cmp(&range.start, &e.buffer.read(cx).snapshot())
+ .is_le()
+ && e.range
+ .end
+ .cmp(&range.end, &e.buffer.read(cx).snapshot())
+ .is_ge()
+ })
.unwrap();
let buffer = excerpt.buffer.read(cx).snapshot();
let buffer_id = buffer.remote_id();
@@ -1,24 +1,20 @@
-use std::{mem, ops::Range, sync::Arc};
+use std::{ops::Range, rc::Rc, sync::Arc};
-use collections::HashSet;
use gpui::{App, AppContext, Context, Entity};
use itertools::Itertools;
use language::{Buffer, BufferSnapshot};
use rope::Point;
-use text::{Bias, OffsetRangeExt, locator::Locator};
-use util::{post_inc, rel_path::RelPath};
+use sum_tree::{Dimensions, SumTree};
+use text::{Bias, BufferId, Edit, OffsetRangeExt, Patch};
+use util::rel_path::RelPath;
use ztracing::instrument;
use crate::{
- Anchor, ExcerptId, ExcerptRange, ExpandExcerptDirection, MultiBuffer, build_excerpt_ranges,
+ Anchor, BufferState, BufferStateSnapshot, DiffChangeKind, Event, Excerpt, ExcerptOffset,
+ ExcerptRange, ExcerptSummary, ExpandExcerptDirection, MultiBuffer, MultiBufferOffset,
+ PathKeyIndex, build_excerpt_ranges, remove_diff_state,
};
-#[derive(Debug, Clone)]
-pub struct PathExcerptInsertResult {
- pub excerpt_ids: Vec<ExcerptId>,
- pub added_new_excerpt: bool,
-}
-
#[derive(PartialEq, Eq, Ord, PartialOrd, Clone, Hash, Debug)]
pub struct PathKey {
// Used by the derived PartialOrd & Ord
@@ -27,6 +23,13 @@ pub struct PathKey {
}
impl PathKey {
+ pub fn min() -> Self {
+ Self {
+ sort_prefix: None,
+ path: RelPath::empty().into_arc(),
+ }
+ }
+
pub fn sorted(sort_prefix: u64) -> Self {
Self {
sort_prefix: Some(sort_prefix),
@@ -55,41 +58,17 @@ impl PathKey {
}
impl MultiBuffer {
- pub fn paths(&self) -> impl Iterator<Item = &PathKey> + '_ {
- self.excerpts_by_path.keys()
- }
-
- pub fn excerpts_for_path(&self, path: &PathKey) -> impl '_ + Iterator<Item = ExcerptId> {
- self.excerpts_by_path
- .get(path)
- .map(|excerpts| excerpts.as_slice())
- .unwrap_or_default()
- .iter()
- .copied()
- }
-
- pub fn path_for_excerpt(&self, excerpt: ExcerptId) -> Option<PathKey> {
- self.paths_by_excerpt.get(&excerpt).cloned()
- }
-
- pub fn remove_excerpts_for_path(&mut self, path: PathKey, cx: &mut Context<Self>) {
- if let Some(to_remove) = self.excerpts_by_path.remove(&path) {
- self.remove_excerpts(to_remove, cx)
- }
- }
-
pub fn buffer_for_path(&self, path: &PathKey, cx: &App) -> Option<Entity<Buffer>> {
- let excerpt_id = self.excerpts_by_path.get(path)?.first()?;
- let snapshot = self.read(cx);
- let excerpt = snapshot.excerpt(*excerpt_id)?;
- self.buffer(excerpt.buffer_id)
+ let snapshot = self.snapshot(cx);
+ let excerpt = snapshot.excerpts_for_path(path).next()?;
+ self.buffer(excerpt.context.start.buffer_id)
}
pub fn location_for_path(&self, path: &PathKey, cx: &App) -> Option<Anchor> {
- let excerpt_id = self.excerpts_by_path.get(path)?.first()?;
- let snapshot = self.read(cx);
- let excerpt = snapshot.excerpt(*excerpt_id)?;
- Some(Anchor::in_buffer(excerpt.id, excerpt.range.context.start))
+ let snapshot = self.snapshot(cx);
+ let excerpt = snapshot.excerpts_for_path(path).next()?;
+ let path_key_index = snapshot.path_key_index_for_buffer(excerpt.context.start.buffer_id)?;
+ Some(Anchor::in_buffer(path_key_index, excerpt.context.start))
}
pub fn set_excerpts_for_buffer(
@@ -98,12 +77,14 @@ impl MultiBuffer {
ranges: impl IntoIterator<Item = Range<Point>>,
context_line_count: u32,
cx: &mut Context<Self>,
- ) -> (Vec<Range<Anchor>>, bool) {
+ ) -> bool {
let path = PathKey::for_buffer(&buffer, cx);
self.set_excerpts_for_path(path, buffer, ranges, context_line_count, cx)
}
/// Sets excerpts, returns `true` if at least one new excerpt was added.
+ ///
+ /// Any existing excerpts for this buffer or this path will be replaced by the provided ranges.
#[instrument(skip_all)]
pub fn set_excerpts_for_path(
&mut self,
@@ -112,20 +93,83 @@ impl MultiBuffer {
ranges: impl IntoIterator<Item = Range<Point>>,
context_line_count: u32,
cx: &mut Context<Self>,
- ) -> (Vec<Range<Anchor>>, bool) {
+ ) -> bool {
let buffer_snapshot = buffer.read(cx).snapshot();
+ let ranges: Vec<_> = ranges.into_iter().collect();
let excerpt_ranges = build_excerpt_ranges(ranges, context_line_count, &buffer_snapshot);
- let (new, counts) = Self::merge_excerpt_ranges(&excerpt_ranges);
- self.set_merged_excerpt_ranges_for_path(
- path,
- buffer,
- excerpt_ranges,
+ let merged = Self::merge_excerpt_ranges(&excerpt_ranges);
+ let (inserted, _path_key_index) =
+ self.set_merged_excerpt_ranges_for_path(path, buffer, &buffer_snapshot, merged, cx);
+ inserted
+ }
+
+ /// Like [`Self::set_excerpts_for_path`], but expands the provided ranges to cover any overlapping existing excerpts
+ /// for the same buffer and path.
+ ///
+ /// Existing excerpts that do not overlap any of the provided ranges are discarded.
+ pub fn update_excerpts_for_path(
+ &mut self,
+ path: PathKey,
+ buffer: Entity<Buffer>,
+ ranges: impl IntoIterator<Item = Range<Point>>,
+ context_line_count: u32,
+ cx: &mut Context<Self>,
+ ) -> bool {
+ let buffer_snapshot = buffer.read(cx).snapshot();
+ let ranges: Vec<_> = ranges.into_iter().collect();
+ let excerpt_ranges = build_excerpt_ranges(ranges, context_line_count, &buffer_snapshot);
+ let merged = self.merge_new_with_existing_excerpt_ranges(
+ &path,
&buffer_snapshot,
- new,
- counts,
+ excerpt_ranges,
cx,
- )
+ );
+
+ let (inserted, _path_key_index) =
+ self.set_merged_excerpt_ranges_for_path(path, buffer, &buffer_snapshot, merged, cx);
+ inserted
+ }
+
+ pub fn merge_new_with_existing_excerpt_ranges(
+ &self,
+ path: &PathKey,
+ buffer_snapshot: &BufferSnapshot,
+ mut excerpt_ranges: Vec<ExcerptRange<Point>>,
+ cx: &App,
+ ) -> Vec<ExcerptRange<Point>> {
+ let multibuffer_snapshot = self.snapshot(cx);
+
+ if multibuffer_snapshot.path_for_buffer(buffer_snapshot.remote_id()) == Some(path) {
+ excerpt_ranges.sort_by_key(|range| range.context.start);
+ let mut combined_ranges = Vec::new();
+ let mut new_ranges = excerpt_ranges.into_iter().peekable();
+ for existing_range in
+ multibuffer_snapshot.excerpts_for_buffer(buffer_snapshot.remote_id())
+ {
+ let existing_range = ExcerptRange {
+ context: existing_range.context.to_point(buffer_snapshot),
+ primary: existing_range.primary.to_point(buffer_snapshot),
+ };
+ while let Some(new_range) = new_ranges.peek()
+ && new_range.context.end < existing_range.context.start
+ {
+ combined_ranges.push(new_range.clone());
+ new_ranges.next();
+ }
+
+ if let Some(new_range) = new_ranges.peek()
+ && new_range.context.start <= existing_range.context.end
+ {
+ combined_ranges.push(existing_range)
+ }
+ }
+ combined_ranges.extend(new_ranges);
+ excerpt_ranges = combined_ranges;
+ }
+
+ excerpt_ranges.sort_by_key(|range| range.context.start);
+ Self::merge_excerpt_ranges(&excerpt_ranges)
}
pub fn set_excerpt_ranges_for_path(
@@ -135,17 +179,11 @@ impl MultiBuffer {
buffer_snapshot: &BufferSnapshot,
excerpt_ranges: Vec<ExcerptRange<Point>>,
cx: &mut Context<Self>,
- ) -> (Vec<Range<Anchor>>, bool) {
- let (new, counts) = Self::merge_excerpt_ranges(&excerpt_ranges);
- self.set_merged_excerpt_ranges_for_path(
- path,
- buffer,
- excerpt_ranges,
- buffer_snapshot,
- new,
- counts,
- cx,
- )
+ ) -> bool {
+ let merged = Self::merge_excerpt_ranges(&excerpt_ranges);
+ let (inserted, _path_key_index) =
+ self.set_merged_excerpt_ranges_for_path(path, buffer, buffer_snapshot, merged, cx);
+ inserted
}
pub fn set_anchored_excerpts_for_path(
@@ -161,350 +199,505 @@ impl MultiBuffer {
let mut app = cx.to_async();
async move {
let snapshot = buffer_snapshot.clone();
- let (excerpt_ranges, new, counts) = app
+ let (ranges, merged_excerpt_ranges) = app
.background_spawn(async move {
- let ranges = ranges.into_iter().map(|range| range.to_point(&snapshot));
+ let point_ranges = ranges.iter().map(|range| range.to_point(&snapshot));
let excerpt_ranges =
- build_excerpt_ranges(ranges, context_line_count, &snapshot);
- let (new, counts) = Self::merge_excerpt_ranges(&excerpt_ranges);
- (excerpt_ranges, new, counts)
+ build_excerpt_ranges(point_ranges, context_line_count, &snapshot);
+ let merged = Self::merge_excerpt_ranges(&excerpt_ranges);
+ (ranges, merged)
})
.await;
multi_buffer
.update(&mut app, move |multi_buffer, cx| {
- let (ranges, _) = multi_buffer.set_merged_excerpt_ranges_for_path(
+ let (_, path_key_index) = multi_buffer.set_merged_excerpt_ranges_for_path(
path_key,
buffer,
- excerpt_ranges,
&buffer_snapshot,
- new,
- counts,
+ merged_excerpt_ranges,
cx,
);
ranges
+ .into_iter()
+ .map(|range| Anchor::range_in_buffer(path_key_index, range))
+ .collect()
})
.ok()
.unwrap_or_default()
}
}
- pub(super) fn expand_excerpts_with_paths(
+ pub fn expand_excerpts(
&mut self,
- ids: impl IntoIterator<Item = ExcerptId>,
+ anchors: impl IntoIterator<Item = Anchor>,
line_count: u32,
direction: ExpandExcerptDirection,
cx: &mut Context<Self>,
) {
- let mut sorted_ids: Vec<ExcerptId> = ids.into_iter().collect();
- sorted_ids.sort_by(|a, b| {
- let path_a = self.paths_by_excerpt.get(a);
- let path_b = self.paths_by_excerpt.get(b);
- path_a.cmp(&path_b)
- });
- let grouped = sorted_ids
- .into_iter()
- .chunk_by(|id| self.paths_by_excerpt.get(id).cloned())
+ if line_count == 0 {
+ return;
+ }
+
+ let snapshot = self.snapshot(cx);
+ let mut sorted_anchors = anchors
.into_iter()
- .filter_map(|(k, v)| Some((k?, v.into_iter().collect::<Vec<_>>())))
+ .filter_map(|anchor| anchor.excerpt_anchor())
.collect::<Vec<_>>();
- let snapshot = self.snapshot(cx);
-
- for (path, ids) in grouped.into_iter() {
- let Some(excerpt_ids) = self.excerpts_by_path.get(&path) else {
+ if sorted_anchors.is_empty() {
+ return;
+ }
+ sorted_anchors.sort_by(|a, b| a.cmp(b, &snapshot));
+ let buffers = sorted_anchors.into_iter().chunk_by(|anchor| anchor.path);
+ let mut cursor = snapshot.excerpts.cursor::<ExcerptSummary>(());
+
+ for (path_index, excerpt_anchors) in &buffers {
+ let path = snapshot
+ .path_keys_by_index
+ .get(&path_index)
+ .expect("anchor from wrong multibuffer");
+
+ let mut excerpt_anchors = excerpt_anchors.peekable();
+ let mut ranges = Vec::new();
+
+ cursor.seek_forward(path, Bias::Left);
+ let Some((buffer, buffer_snapshot)) = cursor
+ .item()
+ .map(|excerpt| (excerpt.buffer(&self), excerpt.buffer_snapshot(&snapshot)))
+ else {
continue;
};
- let ids_to_expand = HashSet::from_iter(ids);
- let mut excerpt_id_ = None;
- let expanded_ranges = excerpt_ids.iter().filter_map(|excerpt_id| {
- let excerpt = snapshot.excerpt(*excerpt_id)?;
- let excerpt_id = excerpt.id;
- if excerpt_id_.is_none() {
- excerpt_id_ = Some(excerpt_id);
+ while let Some(excerpt) = cursor.item()
+ && &excerpt.path_key == path
+ {
+ let mut range = ExcerptRange {
+ context: excerpt.range.context.to_point(buffer_snapshot),
+ primary: excerpt.range.primary.to_point(buffer_snapshot),
+ };
+
+ let mut needs_expand = false;
+ while excerpt_anchors.peek().is_some_and(|anchor| {
+ excerpt
+ .range
+ .contains(&anchor.text_anchor(), buffer_snapshot)
+ }) {
+ needs_expand = true;
+ excerpt_anchors.next();
}
- let mut context = excerpt.range.context.to_point(&excerpt.buffer);
- if ids_to_expand.contains(&excerpt_id) {
+ if needs_expand {
match direction {
ExpandExcerptDirection::Up => {
- context.start.row = context.start.row.saturating_sub(line_count);
- context.start.column = 0;
+ range.context.start.row =
+ range.context.start.row.saturating_sub(line_count);
+ range.context.start.column = 0;
}
ExpandExcerptDirection::Down => {
- context.end.row =
- (context.end.row + line_count).min(excerpt.buffer.max_point().row);
- context.end.column = excerpt.buffer.line_len(context.end.row);
+ range.context.end.row = (range.context.end.row + line_count)
+ .min(excerpt.buffer_snapshot(&snapshot).max_point().row);
+ range.context.end.column = excerpt
+ .buffer_snapshot(&snapshot)
+ .line_len(range.context.end.row);
}
ExpandExcerptDirection::UpAndDown => {
- context.start.row = context.start.row.saturating_sub(line_count);
- context.start.column = 0;
- context.end.row =
- (context.end.row + line_count).min(excerpt.buffer.max_point().row);
- context.end.column = excerpt.buffer.line_len(context.end.row);
+ range.context.start.row =
+ range.context.start.row.saturating_sub(line_count);
+ range.context.start.column = 0;
+ range.context.end.row = (range.context.end.row + line_count)
+ .min(excerpt.buffer_snapshot(&snapshot).max_point().row);
+ range.context.end.column = excerpt
+ .buffer_snapshot(&snapshot)
+ .line_len(range.context.end.row);
}
}
}
- Some(ExcerptRange {
- context,
- primary: excerpt.range.primary.to_point(&excerpt.buffer),
- })
- });
- let mut merged_ranges: Vec<ExcerptRange<Point>> = Vec::new();
- for range in expanded_ranges {
- if let Some(last_range) = merged_ranges.last_mut()
- && last_range.context.end >= range.context.start
- {
- last_range.context.end = range.context.end;
- continue;
- }
- merged_ranges.push(range)
+ ranges.push(range);
+ cursor.next();
}
- let Some(excerpt_id) = excerpt_id_ else {
- continue;
- };
- let Some(buffer_id) = &snapshot.buffer_id_for_excerpt(excerpt_id) else {
- continue;
- };
- let Some(buffer) = self.buffers.get(buffer_id).map(|b| b.buffer.clone()) else {
- continue;
- };
+ ranges.sort_by(|l, r| l.context.start.cmp(&r.context.start));
- let buffer_snapshot = buffer.read(cx).snapshot();
- self.update_path_excerpts(path.clone(), buffer, &buffer_snapshot, merged_ranges, cx);
+ self.set_excerpt_ranges_for_path(path.clone(), buffer, buffer_snapshot, ranges, cx);
}
}
/// Sets excerpts, returns `true` if at least one new excerpt was added.
- fn set_merged_excerpt_ranges_for_path(
+ pub(crate) fn set_merged_excerpt_ranges_for_path<T>(
&mut self,
path: PathKey,
buffer: Entity<Buffer>,
- ranges: Vec<ExcerptRange<Point>>,
buffer_snapshot: &BufferSnapshot,
- new: Vec<ExcerptRange<Point>>,
- counts: Vec<usize>,
+ new: Vec<ExcerptRange<T>>,
cx: &mut Context<Self>,
- ) -> (Vec<Range<Anchor>>, bool) {
- let insert_result = self.update_path_excerpts(path, buffer, buffer_snapshot, new, cx);
-
- let mut result = Vec::new();
- let mut ranges = ranges.into_iter();
- for (excerpt_id, range_count) in insert_result
- .excerpt_ids
+ ) -> (bool, PathKeyIndex)
+ where
+ T: language::ToOffset,
+ {
+ let anchor_ranges = new
.into_iter()
- .zip(counts.into_iter())
- {
- for range in ranges.by_ref().take(range_count) {
- let range = Anchor::range_in_buffer(
- excerpt_id,
- buffer_snapshot.anchor_before(&range.primary.start)
- ..buffer_snapshot.anchor_after(&range.primary.end),
- );
- result.push(range)
- }
+ .map(|r| ExcerptRange {
+ context: buffer_snapshot.anchor_before(r.context.start)
+ ..buffer_snapshot.anchor_after(r.context.end),
+ primary: buffer_snapshot.anchor_before(r.primary.start)
+ ..buffer_snapshot.anchor_after(r.primary.end),
+ })
+ .collect::<Vec<_>>();
+ let inserted =
+ self.update_path_excerpts(path.clone(), buffer, buffer_snapshot, &anchor_ranges, cx);
+ let path_key_index = self.get_or_create_path_key_index(&path);
+ (inserted, path_key_index)
+ }
+
+ pub(crate) fn get_or_create_path_key_index(&mut self, path_key: &PathKey) -> PathKeyIndex {
+ let mut snapshot = self.snapshot.borrow_mut();
+
+ if let Some(&existing) = snapshot.indices_by_path_key.get(path_key) {
+ return existing;
}
- (result, insert_result.added_new_excerpt)
+
+ let index = snapshot
+ .path_keys_by_index
+ .last()
+ .map(|(index, _)| PathKeyIndex(index.0 + 1))
+ .unwrap_or(PathKeyIndex(0));
+ snapshot.path_keys_by_index.insert(index, path_key.clone());
+ snapshot.indices_by_path_key.insert(path_key.clone(), index);
+ index
}
pub fn update_path_excerpts(
&mut self,
- path: PathKey,
+ path_key: PathKey,
buffer: Entity<Buffer>,
buffer_snapshot: &BufferSnapshot,
- new: Vec<ExcerptRange<Point>>,
+ to_insert: &Vec<ExcerptRange<text::Anchor>>,
cx: &mut Context<Self>,
- ) -> PathExcerptInsertResult {
- let mut insert_after = self
- .excerpts_by_path
- .range(..path.clone())
- .next_back()
- .and_then(|(_, value)| value.last().copied())
- .unwrap_or(ExcerptId::min());
-
- let existing = self
- .excerpts_by_path
- .get(&path)
- .cloned()
- .unwrap_or_default();
- let mut new_iter = new.into_iter().peekable();
- let mut existing_iter = existing.into_iter().peekable();
-
- let mut excerpt_ids = Vec::new();
- let mut to_remove = Vec::new();
- let mut to_insert: Vec<(ExcerptId, ExcerptRange<Point>)> = Vec::new();
- let mut added_a_new_excerpt = false;
- let snapshot = self.snapshot(cx);
+ ) -> bool {
+ let path_key_index = self.get_or_create_path_key_index(&path_key);
+ if let Some(old_path_key) = self
+ .snapshot(cx)
+ .path_for_buffer(buffer_snapshot.remote_id())
+ && old_path_key != &path_key
+ {
+ self.remove_excerpts(old_path_key.clone(), cx);
+ }
- let mut next_excerpt_id =
- if let Some(last_entry) = self.snapshot.get_mut().excerpt_ids.last() {
- last_entry.id.0 + 1
- } else {
- 1
- };
+ if to_insert.len() == 0 {
+ self.remove_excerpts(path_key.clone(), cx);
- let mut next_excerpt_id = move || ExcerptId(post_inc(&mut next_excerpt_id));
+ return false;
+ }
+ assert_eq!(self.history.transaction_depth(), 0);
+ self.sync_mut(cx);
- let mut excerpts_cursor = snapshot.excerpts.cursor::<Option<&Locator>>(());
- excerpts_cursor.next();
+ let buffer_id = buffer_snapshot.remote_id();
- loop {
- let existing = if let Some(&existing_id) = existing_iter.peek() {
- let locator = snapshot.excerpt_locator_for_id(existing_id);
- excerpts_cursor.seek_forward(&Some(locator), Bias::Left);
- if let Some(excerpt) = excerpts_cursor.item() {
- if excerpt.buffer_id != buffer_snapshot.remote_id() {
- to_remove.push(existing_id);
- existing_iter.next();
- continue;
- }
- Some((existing_id, excerpt.range.context.to_point(buffer_snapshot)))
- } else {
- None
- }
- } else {
- None
+ let mut snapshot = self.snapshot.get_mut();
+ let mut cursor = snapshot
+ .excerpts
+ .cursor::<Dimensions<PathKey, ExcerptOffset>>(());
+ let mut new_excerpts = SumTree::new(());
+
+ let new_ranges = to_insert.clone();
+ let mut to_insert = to_insert.iter().peekable();
+ let mut patch = Patch::empty();
+ let mut added_new_excerpt = false;
+
+ new_excerpts.append(cursor.slice(&path_key, Bias::Left), ());
+
+ // handle the case where the path key used to be associated
+ // with a different buffer by removing its excerpts.
+ if let Some(excerpt) = cursor.item()
+ && &excerpt.path_key == &path_key
+ && excerpt.buffer_id != buffer_id
+ {
+ let old_buffer_id = excerpt.buffer_id;
+ self.buffers.remove(&old_buffer_id);
+ snapshot.buffers.remove(&old_buffer_id);
+ remove_diff_state(&mut snapshot.diffs, old_buffer_id);
+ self.diffs.remove(&old_buffer_id);
+ let before = cursor.position.1;
+ cursor.seek_forward(&path_key, Bias::Right);
+ let after = cursor.position.1;
+ patch.push(Edit {
+ old: before..after,
+ new: new_excerpts.summary().len()..new_excerpts.summary().len(),
+ });
+ cx.emit(Event::BuffersRemoved {
+ removed_buffer_ids: vec![old_buffer_id],
+ });
+ }
+
+ while let Some(excerpt) = cursor.item()
+ && excerpt.path_key == path_key
+ {
+ assert_eq!(excerpt.buffer_id, buffer_id);
+ let Some(next_excerpt) = to_insert.peek() else {
+ break;
};
+ if &excerpt.range == *next_excerpt {
+ let before = new_excerpts.summary().len();
+ new_excerpts.update_last(
+ |prev_excerpt| {
+ if !prev_excerpt.has_trailing_newline {
+ prev_excerpt.has_trailing_newline = true;
+ patch.push(Edit {
+ old: cursor.position.1..cursor.position.1,
+ new: before..before + MultiBufferOffset(1),
+ });
+ }
+ },
+ (),
+ );
+ new_excerpts.push(excerpt.clone(), ());
+ to_insert.next();
+ cursor.next();
+ continue;
+ }
- let new = new_iter.peek();
- // Try to merge the next new range or existing excerpt into the last
- // queued insert.
- if let Some((last_id, last)) = to_insert.last_mut() {
- // Next new range overlaps the last queued insert: absorb it by
- // extending the insert's end.
- if let Some(new) = new
- && last.context.end >= new.context.start
- {
- last.context.end = last.context.end.max(new.context.end);
- excerpt_ids.push(*last_id);
- new_iter.next();
- continue;
- }
- // Next existing excerpt overlaps the last queued insert: absorb
- // it by extending the insert's end, and record the existing
- // excerpt as replaced so anchors in it resolve to the new one.
- if let Some((existing_id, existing_range)) = &existing
- && last.context.end >= existing_range.start
- {
- last.context.end = last.context.end.max(existing_range.end);
- to_remove.push(*existing_id);
- Arc::make_mut(&mut self.snapshot.get_mut().replaced_excerpts)
- .insert(*existing_id, *last_id);
- existing_iter.next();
- continue;
- }
+ if excerpt
+ .range
+ .context
+ .start
+ .cmp(&next_excerpt.context.start, &buffer_snapshot)
+ .is_le()
+ {
+ // remove old excerpt
+ let before = cursor.position.1;
+ cursor.next();
+ let after = cursor.position.1;
+ patch.push(Edit {
+ old: before..after,
+ new: new_excerpts.summary().len()..new_excerpts.summary().len(),
+ });
+ } else {
+ // insert new excerpt
+ let next_excerpt = to_insert.next().unwrap();
+ added_new_excerpt = true;
+ let before = new_excerpts.summary().len();
+ new_excerpts.update_last(
+ |prev_excerpt| {
+ prev_excerpt.has_trailing_newline = true;
+ },
+ (),
+ );
+ new_excerpts.push(
+ Excerpt::new(
+ path_key.clone(),
+ path_key_index,
+ &buffer_snapshot,
+ next_excerpt.clone(),
+ false,
+ ),
+ (),
+ );
+ let after = new_excerpts.summary().len();
+ patch.push_maybe_empty(Edit {
+ old: cursor.position.1..cursor.position.1,
+ new: before..after,
+ });
}
+ }
- match (new, existing) {
- (None, None) => break,
+ // remove any further trailing excerpts
+ let mut before = cursor.position.1;
+ cursor.seek_forward(&path_key, Bias::Right);
+ let after = cursor.position.1;
+ // if we removed the previous last excerpt, remove the trailing newline from the new last excerpt
+ if cursor.item().is_none() && to_insert.peek().is_none() {
+ new_excerpts.update_last(
+ |excerpt| {
+ if excerpt.has_trailing_newline {
+ before.0.0 = before
+ .0
+ .0
+ .checked_sub(1)
+ .expect("should have preceding excerpt");
+ excerpt.has_trailing_newline = false;
+ }
+ },
+ (),
+ );
+ }
+ patch.push(Edit {
+ old: before..after,
+ new: new_excerpts.summary().len()..new_excerpts.summary().len(),
+ });
- // No more new ranges; remove the remaining existing excerpt.
- (None, Some((existing_id, _))) => {
- existing_iter.next();
- to_remove.push(existing_id);
- }
+ while let Some(next_excerpt) = to_insert.next() {
+ added_new_excerpt = true;
+ let before = new_excerpts.summary().len();
+ new_excerpts.update_last(
+ |prev_excerpt| {
+ prev_excerpt.has_trailing_newline = true;
+ },
+ (),
+ );
+ new_excerpts.push(
+ Excerpt::new(
+ path_key.clone(),
+ path_key_index,
+ &buffer_snapshot,
+ next_excerpt.clone(),
+ false,
+ ),
+ (),
+ );
+ let after = new_excerpts.summary().len();
+ patch.push_maybe_empty(Edit {
+ old: cursor.position.1..cursor.position.1,
+ new: before..after,
+ });
+ }
- // No more existing excerpts; queue the new range for insertion.
- (Some(_), None) => {
- added_a_new_excerpt = true;
- let new_id = next_excerpt_id();
- excerpt_ids.push(new_id);
- to_insert.push((new_id, new_iter.next().unwrap()));
- }
+ let suffix_start = cursor.position.1;
+ let suffix = cursor.suffix();
+ let changed_trailing_excerpt = suffix.is_empty();
+ if !suffix.is_empty() {
+ let before = new_excerpts.summary().len();
+ new_excerpts.update_last(
+ |prev_excerpt| {
+ if !prev_excerpt.has_trailing_newline {
+ prev_excerpt.has_trailing_newline = true;
+ patch.push(Edit {
+ old: suffix_start..suffix_start,
+ new: before..before + MultiBufferOffset(1),
+ });
+ }
+ },
+ (),
+ );
+ }
+ new_excerpts.append(suffix, ());
+ drop(cursor);
+
+ snapshot.excerpts = new_excerpts;
+ snapshot.buffers.insert(
+ buffer_id,
+ BufferStateSnapshot {
+ path_key: path_key.clone(),
+ path_key_index,
+ buffer_snapshot: buffer_snapshot.clone(),
+ },
+ );
+
+ self.buffers.entry(buffer_id).or_insert_with(|| {
+ self.buffer_changed_since_sync.replace(true);
+ buffer.update(cx, |buffer, _| {
+ buffer.record_changes(Rc::downgrade(&self.buffer_changed_since_sync));
+ });
+ BufferState {
+ _subscriptions: [
+ cx.observe(&buffer, |_, _, cx| cx.notify()),
+ cx.subscribe(&buffer, Self::on_buffer_event),
+ ],
+ buffer: buffer.clone(),
+ }
+ });
- // Existing excerpt ends before the new range starts, so it
- // has no corresponding new range and must be removed. Flush
- // pending inserts and advance `insert_after` past it so that
- // future inserts receive locators *after* this excerpt's
- // locator, preserving forward ordering.
- (Some(new), Some((_, existing_range)))
- if existing_range.end < new.context.start =>
- {
- self.insert_excerpts_with_ids_after(
- insert_after,
- buffer.clone(),
- mem::take(&mut to_insert),
- cx,
- );
- insert_after = existing_iter.next().unwrap();
- to_remove.push(insert_after);
- }
- // New range ends before the existing excerpt starts, so the
- // new range has no corresponding existing excerpt. Queue it
- // for insertion at the current `insert_after` position
- // (before the existing excerpt), which is the correct
- // spatial ordering.
- (Some(new), Some((_, existing_range)))
- if existing_range.start > new.context.end =>
- {
- let new_id = next_excerpt_id();
- excerpt_ids.push(new_id);
- to_insert.push((new_id, new_iter.next().unwrap()));
- }
- // Exact match: keep the existing excerpt in place, flush
- // any pending inserts before it, and use it as the new
- // `insert_after` anchor.
- (Some(new), Some((_, existing_range)))
- if existing_range.start == new.context.start
- && existing_range.end == new.context.end =>
- {
- self.insert_excerpts_with_ids_after(
- insert_after,
- buffer.clone(),
- mem::take(&mut to_insert),
- cx,
- );
- insert_after = existing_iter.next().unwrap();
- excerpt_ids.push(insert_after);
- new_iter.next();
- }
+ if changed_trailing_excerpt {
+ snapshot.trailing_excerpt_update_count += 1;
+ }
- // Partial overlap: replace the existing excerpt with a new
- // one whose range is the union of both, and record the
- // replacement so that anchors in the old excerpt resolve to
- // the new one.
- (Some(_), Some((_, existing_range))) => {
- let existing_id = existing_iter.next().unwrap();
- let new_id = next_excerpt_id();
- Arc::make_mut(&mut self.snapshot.get_mut().replaced_excerpts)
- .insert(existing_id, new_id);
- to_remove.push(existing_id);
- let mut range = new_iter.next().unwrap();
- range.context.start = range.context.start.min(existing_range.start);
- range.context.end = range.context.end.max(existing_range.end);
- excerpt_ids.push(new_id);
- to_insert.push((new_id, range));
- }
- };
+ let edits = Self::sync_diff_transforms(
+ &mut snapshot,
+ patch.into_inner(),
+ DiffChangeKind::BufferEdited,
+ );
+ if !edits.is_empty() {
+ self.subscriptions.publish(edits);
}
- self.insert_excerpts_with_ids_after(insert_after, buffer, to_insert, cx);
- // todo(lw): There is a logic bug somewhere that causes the to_remove vector to be not ordered correctly
- to_remove.sort_by_cached_key(|&id| snapshot.excerpt_locator_for_id(id));
- self.remove_excerpts(to_remove, cx);
+ cx.emit(Event::Edited {
+ edited_buffer: None,
+ is_local: true,
+ });
+ cx.emit(Event::BufferRangesUpdated {
+ buffer,
+ path_key: path_key.clone(),
+ ranges: new_ranges,
+ });
+ cx.notify();
- if excerpt_ids.is_empty() {
- self.excerpts_by_path.remove(&path);
- } else {
- let snapshot = &*self.snapshot.get_mut();
- let excerpt_ids = excerpt_ids
- .iter()
- .dedup()
- .cloned()
- // todo(lw): There is a logic bug somewhere that causes excerpt_ids to not necessarily be in order by locator
- .sorted_by_cached_key(|&id| snapshot.excerpt_locator_for_id(id))
- .collect();
- for &excerpt_id in &excerpt_ids {
- self.paths_by_excerpt.insert(excerpt_id, path.clone());
- }
- self.excerpts_by_path.insert(path, excerpt_ids);
+ added_new_excerpt
+ }
+
+ pub fn remove_excerpts_for_buffer(&mut self, buffer: BufferId, cx: &mut Context<Self>) {
+ let snapshot = self.sync_mut(cx);
+ let Some(path) = snapshot.path_for_buffer(buffer).cloned() else {
+ return;
+ };
+ self.remove_excerpts(path, cx);
+ }
+
+ pub fn remove_excerpts(&mut self, path: PathKey, cx: &mut Context<Self>) {
+ assert_eq!(self.history.transaction_depth(), 0);
+ self.sync_mut(cx);
+
+ let mut snapshot = self.snapshot.get_mut();
+ let mut cursor = snapshot
+ .excerpts
+ .cursor::<Dimensions<PathKey, ExcerptOffset>>(());
+ let mut new_excerpts = SumTree::new(());
+ new_excerpts.append(cursor.slice(&path, Bias::Left), ());
+ let mut edit_start = cursor.position.1;
+ let mut buffer_id = None;
+ if let Some(excerpt) = cursor.item()
+ && excerpt.path_key == path
+ {
+ buffer_id = Some(excerpt.buffer_id);
}
+ cursor.seek(&path, Bias::Right);
+ let edit_end = cursor.position.1;
+ let suffix = cursor.suffix();
+ let changed_trailing_excerpt = suffix.is_empty();
+ new_excerpts.append(suffix, ());
+
+ if let Some(buffer_id) = buffer_id {
+ snapshot.buffers.remove(&buffer_id);
+ remove_diff_state(&mut snapshot.diffs, buffer_id);
+ self.buffers.remove(&buffer_id);
+ self.diffs.remove(&buffer_id);
+ cx.emit(Event::BuffersRemoved {
+ removed_buffer_ids: vec![buffer_id],
+ })
+ }
+ drop(cursor);
+ if changed_trailing_excerpt {
+ snapshot.trailing_excerpt_update_count += 1;
+ new_excerpts.update_last(
+ |excerpt| {
+ if excerpt.has_trailing_newline {
+ excerpt.has_trailing_newline = false;
+ edit_start.0.0 = edit_start
+ .0
+ .0
+ .checked_sub(1)
+ .expect("should have at least one excerpt");
+ }
+ },
+ (),
+ )
+ }
+
+ let edit = Edit {
+ old: edit_start..edit_end,
+ new: edit_start..edit_start,
+ };
+ snapshot.excerpts = new_excerpts;
- PathExcerptInsertResult {
- excerpt_ids,
- added_new_excerpt: added_a_new_excerpt,
+ let edits =
+ Self::sync_diff_transforms(&mut snapshot, vec![edit], DiffChangeKind::BufferEdited);
+ if !edits.is_empty() {
+ self.subscriptions.publish(edits);
}
+
+ cx.emit(Event::Edited {
+ edited_buffer: None,
+ is_local: true,
+ });
+ cx.notify();
}
}
@@ -2,15 +2,15 @@ use gpui::{App, Context, Entity};
use language::{self, Buffer, TransactionId};
use std::{
collections::HashMap,
- ops::{AddAssign, Range, Sub},
+ ops::Range,
time::{Duration, Instant},
};
use sum_tree::Bias;
use text::BufferId;
-use crate::{BufferState, MultiBufferDimension};
+use crate::{Anchor, BufferState, MultiBufferOffset};
-use super::{Event, ExcerptSummary, MultiBuffer};
+use super::{Event, MultiBuffer};
#[derive(Clone)]
pub(super) struct History {
@@ -314,71 +314,50 @@ impl MultiBuffer {
}
}
- pub fn edited_ranges_for_transaction<D>(
+ pub fn edited_ranges_for_transaction(
&self,
transaction_id: TransactionId,
cx: &App,
- ) -> Vec<Range<D>>
- where
- D: MultiBufferDimension
- + Ord
- + Sub<D, Output = D::TextDimension>
- + AddAssign<D::TextDimension>,
- D::TextDimension: PartialOrd + Sub<D::TextDimension, Output = D::TextDimension>,
- {
+ ) -> Vec<Range<MultiBufferOffset>> {
let Some(transaction) = self.history.transaction(transaction_id) else {
return Vec::new();
};
- let mut ranges = Vec::new();
let snapshot = self.read(cx);
- let mut cursor = snapshot.excerpts.cursor::<ExcerptSummary>(());
+ let mut buffer_anchors = Vec::new();
for (buffer_id, buffer_transaction) in &transaction.buffer_transactions {
- let Some(buffer_state) = self.buffers.get(buffer_id) else {
+ let Some(buffer) = self.buffer(*buffer_id) else {
continue;
};
+ let Some(excerpt) = snapshot.first_excerpt_for_buffer(*buffer_id) else {
+ continue;
+ };
+ let buffer_snapshot = buffer.read(cx).snapshot();
- let buffer = buffer_state.buffer.read(cx);
- for range in
- buffer.edited_ranges_for_transaction_id::<D::TextDimension>(*buffer_transaction)
+ for range in buffer
+ .read(cx)
+ .edited_ranges_for_transaction_id::<usize>(*buffer_transaction)
{
- for excerpt_id in &buffer_state.excerpts {
- cursor.seek(excerpt_id, Bias::Left);
- if let Some(excerpt) = cursor.item()
- && excerpt.locator == *excerpt_id
- {
- let excerpt_buffer_start = excerpt
- .range
- .context
- .start
- .summary::<D::TextDimension>(buffer);
- let excerpt_buffer_end = excerpt
- .range
- .context
- .end
- .summary::<D::TextDimension>(buffer);
- let excerpt_range = excerpt_buffer_start..excerpt_buffer_end;
- if excerpt_range.contains(&range.start)
- && excerpt_range.contains(&range.end)
- {
- let excerpt_start = D::from_summary(&cursor.start().text);
-
- let mut start = excerpt_start;
- start += range.start - excerpt_buffer_start;
- let mut end = excerpt_start;
- end += range.end - excerpt_buffer_start;
-
- ranges.push(start..end);
- break;
- }
- }
- }
+ buffer_anchors.push(Anchor::in_buffer(
+ excerpt.path_key_index,
+ buffer_snapshot.anchor_at(range.start, Bias::Left),
+ ));
+ buffer_anchors.push(Anchor::in_buffer(
+ excerpt.path_key_index,
+ buffer_snapshot.anchor_at(range.end, Bias::Right),
+ ));
}
}
+ buffer_anchors.sort_unstable_by(|a, b| a.cmp(b, &snapshot));
- ranges.sort_by_key(|range| range.start);
- ranges
+ snapshot
+ .summaries_for_anchors(buffer_anchors.iter())
+ .as_chunks::<2>()
+ .0
+ .iter()
+ .map(|&[s, e]| s..e)
+ .collect::<Vec<_>>()
}
pub fn merge_transactions(
@@ -79,29 +79,37 @@ fn outline_for_editor(
cx: &mut App,
) -> Option<Task<Vec<OutlineItem<Anchor>>>> {
let multibuffer = editor.read(cx).buffer().read(cx).snapshot(cx);
- let (excerpt_id, _, buffer_snapshot) = multibuffer.as_singleton()?;
+ let buffer_snapshot = multibuffer.as_singleton()?;
let buffer_id = buffer_snapshot.remote_id();
let task = editor.update(cx, |editor, cx| editor.buffer_outline_items(buffer_id, cx));
Some(cx.background_executor().spawn(async move {
task.await
.into_iter()
- .map(|item| OutlineItem {
- depth: item.depth,
- range: Anchor::range_in_buffer(excerpt_id, item.range),
- source_range_for_text: Anchor::range_in_buffer(
- excerpt_id,
- item.source_range_for_text,
- ),
- text: item.text,
- highlight_ranges: item.highlight_ranges,
- name_ranges: item.name_ranges,
- body_range: item
- .body_range
- .map(|r| Anchor::range_in_buffer(excerpt_id, r)),
- annotation_range: item
- .annotation_range
- .map(|r| Anchor::range_in_buffer(excerpt_id, r)),
+ .filter_map(|item| {
+ Some(OutlineItem {
+ depth: item.depth,
+ range: multibuffer.anchor_in_buffer(item.range.start)?
+ ..multibuffer.anchor_in_buffer(item.range.end)?,
+ source_range_for_text: multibuffer
+ .anchor_in_buffer(item.source_range_for_text.start)?
+ ..multibuffer.anchor_in_buffer(item.source_range_for_text.end)?,
+ text: item.text,
+ highlight_ranges: item.highlight_ranges,
+ name_ranges: item.name_ranges,
+ body_range: item.body_range.and_then(|r| {
+ Some(
+ multibuffer.anchor_in_buffer(r.start)?
+ ..multibuffer.anchor_in_buffer(r.end)?,
+ )
+ }),
+ annotation_range: item.annotation_range.and_then(|r| {
+ Some(
+ multibuffer.anchor_in_buffer(r.start)?
+ ..multibuffer.anchor_in_buffer(r.end)?,
+ )
+ }),
+ })
})
.collect()
}))
@@ -1,11 +1,11 @@
mod outline_panel_settings;
use anyhow::Context as _;
-use collections::{BTreeSet, HashMap, HashSet, hash_map};
+use collections::{BTreeSet, HashMap, HashSet};
use db::kvp::KeyValueStore;
use editor::{
- AnchorRangeExt, Bias, DisplayPoint, Editor, EditorEvent, ExcerptId, ExcerptRange,
- MultiBufferSnapshot, RangeToAnchorExt, SelectionEffects,
+ AnchorRangeExt, Bias, DisplayPoint, Editor, EditorEvent, ExcerptRange, MultiBufferSnapshot,
+ RangeToAnchorExt, SelectionEffects,
display_map::ToDisplayPoint,
items::{entry_git_aware_label_color, entry_label_color},
scroll::{Autoscroll, ScrollAnchor},
@@ -129,12 +129,12 @@ pub struct OutlinePanel {
selected_entry: SelectedEntry,
active_item: Option<ActiveItem>,
_subscriptions: Vec<Subscription>,
- new_entries_for_fs_update: HashSet<ExcerptId>,
+ new_entries_for_fs_update: HashSet<BufferId>,
fs_entries_update_task: Task<()>,
cached_entries_update_task: Task<()>,
reveal_selection_task: Task<anyhow::Result<()>>,
outline_fetch_tasks: HashMap<BufferId, Task<()>>,
- excerpts: HashMap<BufferId, HashMap<ExcerptId, Excerpt>>,
+ buffers: HashMap<BufferId, BufferOutlines>,
cached_entries: Vec<CachedEntry>,
filter_editor: Entity<Editor>,
mode: ItemsDisplayMode,
@@ -334,42 +334,41 @@ enum CollapsedEntry {
Dir(WorktreeId, ProjectEntryId),
File(WorktreeId, BufferId),
ExternalFile(BufferId),
- Excerpt(BufferId, ExcerptId),
- Outline(BufferId, ExcerptId, Range<Anchor>),
+ Excerpt(ExcerptRange<Anchor>),
+ Outline(Range<Anchor>),
}
-#[derive(Debug)]
-struct Excerpt {
- range: ExcerptRange<language::Anchor>,
- outlines: ExcerptOutlines,
+struct BufferOutlines {
+ excerpts: Vec<ExcerptRange<Anchor>>,
+ outlines: OutlineState,
}
-impl Excerpt {
+impl BufferOutlines {
fn invalidate_outlines(&mut self) {
- if let ExcerptOutlines::Outlines(valid_outlines) = &mut self.outlines {
- self.outlines = ExcerptOutlines::Invalidated(std::mem::take(valid_outlines));
+ if let OutlineState::Outlines(valid_outlines) = &mut self.outlines {
+ self.outlines = OutlineState::Invalidated(std::mem::take(valid_outlines));
}
}
fn iter_outlines(&self) -> impl Iterator<Item = &Outline> {
match &self.outlines {
- ExcerptOutlines::Outlines(outlines) => outlines.iter(),
- ExcerptOutlines::Invalidated(outlines) => outlines.iter(),
- ExcerptOutlines::NotFetched => [].iter(),
+ OutlineState::Outlines(outlines) => outlines.iter(),
+ OutlineState::Invalidated(outlines) => outlines.iter(),
+ OutlineState::NotFetched => [].iter(),
}
}
fn should_fetch_outlines(&self) -> bool {
match &self.outlines {
- ExcerptOutlines::Outlines(_) => false,
- ExcerptOutlines::Invalidated(_) => true,
- ExcerptOutlines::NotFetched => true,
+ OutlineState::Outlines(_) => false,
+ OutlineState::Invalidated(_) => true,
+ OutlineState::NotFetched => true,
}
}
}
#[derive(Debug)]
-enum ExcerptOutlines {
+enum OutlineState {
Outlines(Vec<Outline>),
Invalidated(Vec<Outline>),
NotFetched,
@@ -536,54 +535,24 @@ impl SearchData {
}
}
-#[derive(Clone, Debug, PartialEq, Eq, Hash)]
-struct OutlineEntryExcerpt {
- id: ExcerptId,
- buffer_id: BufferId,
- range: ExcerptRange<language::Anchor>,
-}
-
-#[derive(Clone, Debug, Eq)]
-struct OutlineEntryOutline {
- buffer_id: BufferId,
- excerpt_id: ExcerptId,
- outline: Outline,
-}
-
-impl PartialEq for OutlineEntryOutline {
- fn eq(&self, other: &Self) -> bool {
- self.buffer_id == other.buffer_id
- && self.excerpt_id == other.excerpt_id
- && self.outline.depth == other.outline.depth
- && self.outline.range == other.outline.range
- && self.outline.text == other.outline.text
- }
-}
-
-impl Hash for OutlineEntryOutline {
- fn hash<H: std::hash::Hasher>(&self, state: &mut H) {
- (
- self.buffer_id,
- self.excerpt_id,
- self.outline.depth,
- &self.outline.range,
- &self.outline.text,
- )
- .hash(state);
- }
-}
-
#[derive(Clone, Debug, PartialEq, Eq)]
enum OutlineEntry {
- Excerpt(OutlineEntryExcerpt),
- Outline(OutlineEntryOutline),
+ Excerpt(ExcerptRange<Anchor>),
+ Outline(Outline),
}
impl OutlineEntry {
- fn ids(&self) -> (BufferId, ExcerptId) {
+ fn buffer_id(&self) -> BufferId {
match self {
- OutlineEntry::Excerpt(excerpt) => (excerpt.buffer_id, excerpt.id),
- OutlineEntry::Outline(outline) => (outline.buffer_id, outline.excerpt_id),
+ OutlineEntry::Excerpt(excerpt) => excerpt.context.start.buffer_id,
+ OutlineEntry::Outline(outline) => outline.range.start.buffer_id,
+ }
+ }
+
+ fn range(&self) -> Range<Anchor> {
+ match self {
+ OutlineEntry::Excerpt(excerpt) => excerpt.context.clone(),
+ OutlineEntry::Outline(outline) => outline.range.clone(),
}
}
}
@@ -593,7 +562,7 @@ struct FsEntryFile {
worktree_id: WorktreeId,
entry: GitEntry,
buffer_id: BufferId,
- excerpts: Vec<ExcerptId>,
+ excerpts: Vec<ExcerptRange<language::Anchor>>,
}
impl PartialEq for FsEntryFile {
@@ -631,7 +600,7 @@ impl Hash for FsEntryDirectory {
#[derive(Debug, Clone, Eq)]
struct FsEntryExternalFile {
buffer_id: BufferId,
- excerpts: Vec<ExcerptId>,
+ excerpts: Vec<ExcerptRange<language::Anchor>>,
}
impl PartialEq for FsEntryExternalFile {
@@ -787,10 +756,8 @@ impl OutlinePanel {
if ¤t_theme != new_theme {
outline_panel_settings = *new_settings;
current_theme = new_theme.clone();
- for excerpts in outline_panel.excerpts.values_mut() {
- for excerpt in excerpts.values_mut() {
- excerpt.invalidate_outlines();
- }
+ for buffer in outline_panel.buffers.values_mut() {
+ buffer.invalidate_outlines();
}
outlines_invalidated = true;
let update_cached_items = outline_panel.update_non_fs_items(window, cx);
@@ -809,30 +776,23 @@ impl OutlinePanel {
let new_depth = new_settings.expand_outlines_with_depth;
- for (buffer_id, excerpts) in &outline_panel.excerpts {
- for (excerpt_id, excerpt) in excerpts {
- if let ExcerptOutlines::Outlines(outlines) = &excerpt.outlines {
- for outline in outlines {
- if outline_panel
- .outline_children_cache
- .get(buffer_id)
- .and_then(|children_map| {
- let key =
- (outline.range.clone(), outline.depth);
- children_map.get(&key)
- })
- .copied()
- .unwrap_or(false)
- && (new_depth == 0 || outline.depth >= new_depth)
- {
- outline_panel.collapsed_entries.insert(
- CollapsedEntry::Outline(
- *buffer_id,
- *excerpt_id,
- outline.range.clone(),
- ),
- );
- }
+ for (buffer_id, buffer) in &outline_panel.buffers {
+ if let OutlineState::Outlines(outlines) = &buffer.outlines {
+ for outline in outlines {
+ if outline_panel
+ .outline_children_cache
+ .get(buffer_id)
+ .and_then(|children_map| {
+ let key = (outline.range.clone(), outline.depth);
+ children_map.get(&key)
+ })
+ .copied()
+ .unwrap_or(false)
+ && (new_depth == 0 || outline.depth >= new_depth)
+ {
+ outline_panel.collapsed_entries.insert(
+ CollapsedEntry::Outline(outline.range.clone()),
+ );
}
}
}
@@ -852,7 +812,7 @@ impl OutlinePanel {
if !outlines_invalidated {
let new_document_symbols = outline_panel
- .excerpts
+ .buffers
.keys()
.filter_map(|buffer_id| {
let buffer = outline_panel
@@ -867,10 +827,8 @@ impl OutlinePanel {
.collect();
if new_document_symbols != document_symbols_by_buffer {
document_symbols_by_buffer = new_document_symbols;
- for excerpts in outline_panel.excerpts.values_mut() {
- for excerpt in excerpts.values_mut() {
- excerpt.invalidate_outlines();
- }
+ for buffer in outline_panel.buffers.values_mut() {
+ buffer.invalidate_outlines();
}
let update_cached_items = outline_panel.update_non_fs_items(window, cx);
if update_cached_items {
@@ -914,7 +872,7 @@ impl OutlinePanel {
cached_entries_update_task: Task::ready(()),
reveal_selection_task: Task::ready(Ok(())),
outline_fetch_tasks: HashMap::default(),
- excerpts: HashMap::default(),
+ buffers: HashMap::default(),
cached_entries: Vec::new(),
_subscriptions: vec![
settings_subscription,
@@ -1110,16 +1068,13 @@ impl OutlinePanel {
PanelEntry::Fs(FsEntry::ExternalFile(file)) => {
change_selection = false;
scroll_to_buffer = Some(file.buffer_id);
- multi_buffer_snapshot.excerpts().find_map(
- |(excerpt_id, buffer_snapshot, excerpt_range)| {
- if buffer_snapshot.remote_id() == file.buffer_id {
- multi_buffer_snapshot
- .anchor_in_excerpt(excerpt_id, excerpt_range.context.start)
- } else {
- None
- }
- },
- )
+ multi_buffer_snapshot.excerpts().find_map(|excerpt_range| {
+ if excerpt_range.context.start.buffer_id == file.buffer_id {
+ multi_buffer_snapshot.anchor_in_excerpt(excerpt_range.context.start)
+ } else {
+ None
+ }
+ })
}
PanelEntry::Fs(FsEntry::File(file)) => {
@@ -1132,26 +1087,20 @@ impl OutlinePanel {
.and_then(|path| project.get_open_buffer(&path, cx))
})
.map(|buffer| {
- active_multi_buffer
- .read(cx)
- .excerpts_for_buffer(buffer.read(cx).remote_id(), cx)
+ multi_buffer_snapshot.excerpts_for_buffer(buffer.read(cx).remote_id())
})
- .and_then(|excerpts| {
- let (excerpt_id, _, excerpt_range) = excerpts.first()?;
- multi_buffer_snapshot
- .anchor_in_excerpt(*excerpt_id, excerpt_range.context.start)
+ .and_then(|mut excerpts| {
+ let excerpt_range = excerpts.next()?;
+ multi_buffer_snapshot.anchor_in_excerpt(excerpt_range.context.start)
})
}
PanelEntry::Outline(OutlineEntry::Outline(outline)) => multi_buffer_snapshot
- .anchor_in_excerpt(outline.excerpt_id, outline.outline.range.start)
- .or_else(|| {
- multi_buffer_snapshot
- .anchor_in_excerpt(outline.excerpt_id, outline.outline.range.end)
- }),
+ .anchor_in_excerpt(outline.range.start)
+ .or_else(|| multi_buffer_snapshot.anchor_in_excerpt(outline.range.end)),
PanelEntry::Outline(OutlineEntry::Excerpt(excerpt)) => {
change_selection = false;
change_focus = false;
- multi_buffer_snapshot.anchor_in_excerpt(excerpt.id, excerpt.range.context.start)
+ multi_buffer_snapshot.anchor_in_excerpt(excerpt.context.start)
}
PanelEntry::Search(search_entry) => Some(search_entry.match_range.start),
};
@@ -1359,12 +1308,12 @@ impl OutlinePanel {
PanelEntry::Outline(OutlineEntry::Excerpt(excerpt)) => {
previous_entries.find(|entry| match entry {
PanelEntry::Fs(FsEntry::File(file)) => {
- file.buffer_id == excerpt.buffer_id
- && file.excerpts.contains(&excerpt.id)
+ file.buffer_id == excerpt.context.start.buffer_id
+ && file.excerpts.contains(&excerpt)
}
PanelEntry::Fs(FsEntry::ExternalFile(external_file)) => {
- external_file.buffer_id == excerpt.buffer_id
- && external_file.excerpts.contains(&excerpt.id)
+ external_file.buffer_id == excerpt.context.start.buffer_id
+ && external_file.excerpts.contains(&excerpt)
}
_ => false,
})
@@ -1372,8 +1321,16 @@ impl OutlinePanel {
PanelEntry::Outline(OutlineEntry::Outline(outline)) => {
previous_entries.find(|entry| {
if let PanelEntry::Outline(OutlineEntry::Excerpt(excerpt)) = entry {
- outline.buffer_id == excerpt.buffer_id
- && outline.excerpt_id == excerpt.id
+ if outline.range.start.buffer_id != excerpt.context.start.buffer_id {
+ return false;
+ }
+ let Some(buffer_snapshot) =
+ self.buffer_snapshot_for_id(outline.range.start.buffer_id, cx)
+ else {
+ return false;
+ };
+ excerpt.contains(&outline.range.start, &buffer_snapshot)
+ || excerpt.contains(&outline.range.end, &buffer_snapshot)
} else {
false
}
@@ -1584,13 +1541,11 @@ impl OutlinePanel {
Some(CollapsedEntry::ExternalFile(external_file.buffer_id))
}
PanelEntry::Outline(OutlineEntry::Excerpt(excerpt)) => {
- Some(CollapsedEntry::Excerpt(excerpt.buffer_id, excerpt.id))
+ Some(CollapsedEntry::Excerpt(excerpt.clone()))
+ }
+ PanelEntry::Outline(OutlineEntry::Outline(outline)) => {
+ Some(CollapsedEntry::Outline(outline.range.clone()))
}
- PanelEntry::Outline(OutlineEntry::Outline(outline)) => Some(CollapsedEntry::Outline(
- outline.buffer_id,
- outline.excerpt_id,
- outline.outline.range.clone(),
- )),
PanelEntry::Search(_) => return,
};
let Some(collapsed_entry) = entry_to_expand else {
@@ -1691,14 +1646,10 @@ impl OutlinePanel {
}
PanelEntry::Outline(OutlineEntry::Excerpt(excerpt)) => self
.collapsed_entries
- .insert(CollapsedEntry::Excerpt(excerpt.buffer_id, excerpt.id)),
- PanelEntry::Outline(OutlineEntry::Outline(outline)) => {
- self.collapsed_entries.insert(CollapsedEntry::Outline(
- outline.buffer_id,
- outline.excerpt_id,
- outline.outline.range.clone(),
- ))
- }
+ .insert(CollapsedEntry::Excerpt(excerpt.clone())),
+ PanelEntry::Outline(OutlineEntry::Outline(outline)) => self
+ .collapsed_entries
+ .insert(CollapsedEntry::Outline(outline.range.clone())),
PanelEntry::Search(_) => false,
};
@@ -1753,31 +1704,26 @@ impl OutlinePanel {
}
}
- for (&buffer_id, excerpts) in &self.excerpts {
- for (&excerpt_id, excerpt) in excerpts {
- match &excerpt.outlines {
- ExcerptOutlines::Outlines(outlines) => {
- for outline in outlines {
- to_uncollapse.insert(CollapsedEntry::Outline(
- buffer_id,
- excerpt_id,
- outline.range.clone(),
- ));
- }
+ for (_buffer_id, buffer) in &self.buffers {
+ match &buffer.outlines {
+ OutlineState::Outlines(outlines) => {
+ for outline in outlines {
+ to_uncollapse.insert(CollapsedEntry::Outline(outline.range.clone()));
}
- ExcerptOutlines::Invalidated(outlines) => {
- for outline in outlines {
- to_uncollapse.insert(CollapsedEntry::Outline(
- buffer_id,
- excerpt_id,
- outline.range.clone(),
- ));
- }
+ }
+ OutlineState::Invalidated(outlines) => {
+ for outline in outlines {
+ to_uncollapse.insert(CollapsedEntry::Outline(outline.range.clone()));
}
- ExcerptOutlines::NotFetched => {}
}
- to_uncollapse.insert(CollapsedEntry::Excerpt(buffer_id, excerpt_id));
+ OutlineState::NotFetched => {}
}
+ to_uncollapse.extend(
+ buffer
+ .excerpts
+ .iter()
+ .map(|excerpt| CollapsedEntry::Excerpt(excerpt.clone())),
+ );
}
for cached in &self.cached_entries {
@@ -1844,14 +1790,10 @@ impl OutlinePanel {
..
}) => Some(CollapsedEntry::Dir(*worktree_id, entries.last()?.id)),
PanelEntry::Outline(OutlineEntry::Excerpt(excerpt)) => {
- Some(CollapsedEntry::Excerpt(excerpt.buffer_id, excerpt.id))
+ Some(CollapsedEntry::Excerpt(excerpt.clone()))
}
PanelEntry::Outline(OutlineEntry::Outline(outline)) => {
- Some(CollapsedEntry::Outline(
- outline.buffer_id,
- outline.excerpt_id,
- outline.outline.range.clone(),
- ))
+ Some(CollapsedEntry::Outline(outline.range.clone()))
}
PanelEntry::Search(_) => None,
},
@@ -1939,17 +1881,13 @@ impl OutlinePanel {
}
}
PanelEntry::Outline(OutlineEntry::Excerpt(excerpt)) => {
- let collapsed_entry = CollapsedEntry::Excerpt(excerpt.buffer_id, excerpt.id);
+ let collapsed_entry = CollapsedEntry::Excerpt(excerpt.clone());
if !self.collapsed_entries.remove(&collapsed_entry) {
self.collapsed_entries.insert(collapsed_entry);
}
}
PanelEntry::Outline(OutlineEntry::Outline(outline)) => {
- let collapsed_entry = CollapsedEntry::Outline(
- outline.buffer_id,
- outline.excerpt_id,
- outline.outline.range.clone(),
- );
+ let collapsed_entry = CollapsedEntry::Outline(outline.range.clone());
if !self.collapsed_entries.remove(&collapsed_entry) {
self.collapsed_entries.insert(collapsed_entry);
}
@@ -2103,6 +2041,8 @@ impl OutlinePanel {
let project = self.project.clone();
self.reveal_selection_task = cx.spawn_in(window, async move |outline_panel, cx| {
cx.background_executor().timer(UPDATE_DEBOUNCE).await;
+ let multibuffer_snapshot =
+ editor.read_with(cx, |editor, cx| editor.buffer().read(cx).snapshot(cx));
let entry_with_selection =
outline_panel.update_in(cx, |outline_panel, window, cx| {
outline_panel.location_for_editor_selection(&editor, window, cx)
@@ -2132,14 +2072,28 @@ impl OutlinePanel {
})
}),
PanelEntry::Outline(outline_entry) => {
- let (buffer_id, excerpt_id) = outline_entry.ids();
+ let buffer_id = outline_entry.buffer_id();
+ let outline_range = outline_entry.range();
outline_panel.update(cx, |outline_panel, cx| {
outline_panel
.collapsed_entries
.remove(&CollapsedEntry::ExternalFile(buffer_id));
- outline_panel
- .collapsed_entries
- .remove(&CollapsedEntry::Excerpt(buffer_id, excerpt_id));
+ if let Some(buffer_snapshot) =
+ outline_panel.buffer_snapshot_for_id(buffer_id, cx)
+ {
+ outline_panel.collapsed_entries.retain(|entry| match entry {
+ CollapsedEntry::Excerpt(excerpt_range) => {
+ let intersects = excerpt_range.context.start.buffer_id
+ == buffer_id
+ && (excerpt_range
+ .contains(&outline_range.start, &buffer_snapshot)
+ || excerpt_range
+ .contains(&outline_range.end, &buffer_snapshot));
+ !intersects
+ }
+ _ => true,
+ });
+ }
let project = outline_panel.project.read(cx);
let entry_id = project
.buffer_for_id(buffer_id, cx)
@@ -2160,11 +2114,9 @@ impl OutlinePanel {
})?
}
PanelEntry::Fs(FsEntry::ExternalFile(..)) => None,
- PanelEntry::Search(SearchEntry { match_range, .. }) => match_range
- .start
- .text_anchor
- .buffer_id
- .or(match_range.end.text_anchor.buffer_id)
+ PanelEntry::Search(SearchEntry { match_range, .. }) => multibuffer_snapshot
+ .anchor_to_buffer_anchor(match_range.start)
+ .map(|(anchor, _)| anchor.buffer_id)
.map(|buffer_id| {
outline_panel.update(cx, |outline_panel, cx| {
outline_panel
@@ -2246,30 +2198,30 @@ impl OutlinePanel {
fn render_excerpt(
&self,
- excerpt: &OutlineEntryExcerpt,
+ excerpt: &ExcerptRange<Anchor>,
depth: usize,
window: &mut Window,
cx: &mut Context<OutlinePanel>,
) -> Option<Stateful<Div>> {
- let item_id = ElementId::from(excerpt.id.to_proto() as usize);
+ let item_id = ElementId::from(format!("{excerpt:?}"));
let is_active = match self.selected_entry() {
Some(PanelEntry::Outline(OutlineEntry::Excerpt(selected_excerpt))) => {
- selected_excerpt.buffer_id == excerpt.buffer_id && selected_excerpt.id == excerpt.id
+ selected_excerpt == excerpt
}
_ => false,
};
let has_outlines = self
- .excerpts
- .get(&excerpt.buffer_id)
- .and_then(|excerpts| match &excerpts.get(&excerpt.id)?.outlines {
- ExcerptOutlines::Outlines(outlines) => Some(outlines),
- ExcerptOutlines::Invalidated(outlines) => Some(outlines),
- ExcerptOutlines::NotFetched => None,
+ .buffers
+ .get(&excerpt.context.start.buffer_id)
+ .and_then(|buffer| match &buffer.outlines {
+ OutlineState::Outlines(outlines) => Some(outlines),
+ OutlineState::Invalidated(outlines) => Some(outlines),
+ OutlineState::NotFetched => None,
})
.is_some_and(|outlines| !outlines.is_empty());
let is_expanded = !self
.collapsed_entries
- .contains(&CollapsedEntry::Excerpt(excerpt.buffer_id, excerpt.id));
+ .contains(&CollapsedEntry::Excerpt(excerpt.clone()));
let color = entry_label_color(is_active);
let icon = if has_outlines {
FileIcons::get_chevron_icon(is_expanded, cx)
@@ -2279,7 +2231,7 @@ impl OutlinePanel {
}
.unwrap_or_else(empty_icon);
- let label = self.excerpt_label(excerpt.buffer_id, &excerpt.range, cx)?;
+ let label = self.excerpt_label(&excerpt, cx)?;
let label_element = Label::new(label)
.single_line()
.color(color)
@@ -2297,13 +2249,8 @@ impl OutlinePanel {
))
}
- fn excerpt_label(
- &self,
- buffer_id: BufferId,
- range: &ExcerptRange<language::Anchor>,
- cx: &App,
- ) -> Option<String> {
- let buffer_snapshot = self.buffer_snapshot_for_id(buffer_id, cx)?;
+ fn excerpt_label(&self, range: &ExcerptRange<language::Anchor>, cx: &App) -> Option<String> {
+ let buffer_snapshot = self.buffer_snapshot_for_id(range.context.start.buffer_id, cx)?;
let excerpt_range = range.context.to_point(&buffer_snapshot);
Some(format!(
"Lines {}- {}",
@@ -2314,19 +2261,19 @@ impl OutlinePanel {
fn render_outline(
&self,
- outline: &OutlineEntryOutline,
+ outline: &Outline,
depth: usize,
string_match: Option<&StringMatch>,
window: &mut Window,
cx: &mut Context<Self>,
) -> Stateful<Div> {
let item_id = ElementId::from(SharedString::from(format!(
- "{:?}|{:?}{:?}|{:?}",
- outline.buffer_id, outline.excerpt_id, outline.outline.range, &outline.outline.text,
+ "{:?}|{:?}",
+ outline.range, &outline.text,
)));
let label_element = outline::render_item(
- &outline.outline,
+ &outline,
string_match
.map(|string_match| string_match.ranges().collect::<Vec<_>>())
.unwrap_or_default(),
@@ -2335,26 +2282,22 @@ impl OutlinePanel {
.into_any_element();
let is_active = match self.selected_entry() {
- Some(PanelEntry::Outline(OutlineEntry::Outline(selected))) => {
- outline == selected && outline.outline == selected.outline
- }
+ Some(PanelEntry::Outline(OutlineEntry::Outline(selected))) => outline == selected,
_ => false,
};
let has_children = self
.outline_children_cache
- .get(&outline.buffer_id)
+ .get(&outline.range.start.buffer_id)
.and_then(|children_map| {
- let key = (outline.outline.range.clone(), outline.outline.depth);
+ let key = (outline.range.clone(), outline.depth);
children_map.get(&key)
})
.copied()
.unwrap_or(false);
- let is_expanded = !self.collapsed_entries.contains(&CollapsedEntry::Outline(
- outline.buffer_id,
- outline.excerpt_id,
- outline.outline.range.clone(),
- ));
+ let is_expanded = !self
+ .collapsed_entries
+ .contains(&CollapsedEntry::Outline(outline.range.clone()));
let icon = if has_children {
FileIcons::get_chevron_icon(is_expanded, cx)
@@ -2784,7 +2727,7 @@ impl OutlinePanel {
let mut new_collapsed_entries = HashSet::default();
let mut new_unfolded_dirs = HashMap::default();
let mut root_entries = HashSet::default();
- let mut new_excerpts = HashMap::<BufferId, HashMap<ExcerptId, Excerpt>>::default();
+ let mut new_buffers = HashMap::<BufferId, BufferOutlines>::default();
let Ok(buffer_excerpts) = outline_panel.update(cx, |outline_panel, cx| {
let git_store = outline_panel.project.read(cx).git_store().clone();
new_collapsed_entries = outline_panel.collapsed_entries.clone();
@@ -2793,13 +2736,18 @@ impl OutlinePanel {
multi_buffer_snapshot.excerpts().fold(
HashMap::default(),
- |mut buffer_excerpts, (excerpt_id, buffer_snapshot, excerpt_range)| {
+ |mut buffer_excerpts, excerpt_range| {
+ let Some(buffer_snapshot) = multi_buffer_snapshot
+ .buffer_for_id(excerpt_range.context.start.buffer_id)
+ else {
+ return buffer_excerpts;
+ };
let buffer_id = buffer_snapshot.remote_id();
let file = File::from_dyn(buffer_snapshot.file());
let entry_id = file.and_then(|file| file.project_entry_id());
let worktree = file.map(|file| file.worktree.read(cx).snapshot());
- let is_new = new_entries.contains(&excerpt_id)
- || !outline_panel.excerpts.contains_key(&buffer_id);
+ let is_new = new_entries.contains(&buffer_id)
+ || !outline_panel.buffers.contains_key(&buffer_id);
let is_folded = active_editor.read(cx).is_buffer_folded(buffer_id, cx);
let status = git_store
.read(cx)
@@ -2813,29 +2761,28 @@ impl OutlinePanel {
(is_new, is_folded, Vec::new(), entry_id, worktree, status)
})
.2
- .push(excerpt_id);
+ .push(excerpt_range.clone());
- let outlines = match outline_panel
- .excerpts
- .get(&buffer_id)
- .and_then(|excerpts| excerpts.get(&excerpt_id))
- {
- Some(old_excerpt) => match &old_excerpt.outlines {
- ExcerptOutlines::Outlines(outlines) => {
- ExcerptOutlines::Outlines(outlines.clone())
+ new_buffers
+ .entry(buffer_id)
+ .or_insert_with(|| {
+ let outlines = match outline_panel.buffers.get(&buffer_id) {
+ Some(old_buffer) => match &old_buffer.outlines {
+ OutlineState::Outlines(outlines) => {
+ OutlineState::Outlines(outlines.clone())
+ }
+ OutlineState::Invalidated(_) => OutlineState::NotFetched,
+ OutlineState::NotFetched => OutlineState::NotFetched,
+ },
+ None => OutlineState::NotFetched,
+ };
+ BufferOutlines {
+ outlines,
+ excerpts: Vec::new(),
}
- ExcerptOutlines::Invalidated(_) => ExcerptOutlines::NotFetched,
- ExcerptOutlines::NotFetched => ExcerptOutlines::NotFetched,
- },
- None => ExcerptOutlines::NotFetched,
- };
- new_excerpts.entry(buffer_id).or_default().insert(
- excerpt_id,
- Excerpt {
- range: excerpt_range,
- outlines,
- },
- );
+ })
+ .excerpts
+ .push(excerpt_range);
buffer_excerpts
},
)
@@ -2856,7 +2803,7 @@ impl OutlinePanel {
BTreeMap::<WorktreeId, HashMap<ProjectEntryId, GitEntry>>::default();
let mut worktree_excerpts = HashMap::<
WorktreeId,
- HashMap<ProjectEntryId, (BufferId, Vec<ExcerptId>)>,
+ HashMap<ProjectEntryId, (BufferId, Vec<ExcerptRange<Anchor>>)>,
>::default();
let mut external_excerpts = HashMap::default();
@@ -3134,7 +3081,7 @@ impl OutlinePanel {
outline_panel
.update_in(cx, |outline_panel, window, cx| {
outline_panel.new_entries_for_fs_update.clear();
- outline_panel.excerpts = new_excerpts;
+ outline_panel.buffers = new_buffers;
outline_panel.collapsed_entries = new_collapsed_entries;
outline_panel.unfolded_dirs = new_unfolded_dirs;
outline_panel.fs_entries = new_fs_entries;
@@ -3144,7 +3091,7 @@ impl OutlinePanel {
// Only update cached entries if we don't have outlines to fetch
// If we do have outlines to fetch, let fetch_outdated_outlines handle the update
- if outline_panel.excerpt_fetch_ranges(cx).is_empty() {
+ if outline_panel.buffers_to_fetch().is_empty() {
outline_panel.update_cached_entries(debounce, window, cx);
}
@@ -3192,8 +3139,15 @@ impl OutlinePanel {
item_handle: new_active_item.downgrade_item(),
active_editor: new_active_editor.downgrade(),
});
- self.new_entries_for_fs_update
- .extend(new_active_editor.read(cx).buffer().read(cx).excerpt_ids());
+ self.new_entries_for_fs_update.extend(
+ new_active_editor
+ .read(cx)
+ .buffer()
+ .read(cx)
+ .snapshot(cx)
+ .excerpts()
+ .map(|excerpt| excerpt.context.start.buffer_id),
+ );
self.selected_entry.invalidate();
self.update_fs_entries(new_active_editor, None, window, cx);
}
@@ -3211,7 +3165,7 @@ impl OutlinePanel {
self.fs_entries.clear();
self.fs_entries_depth.clear();
self.fs_children_count.clear();
- self.excerpts.clear();
+ self.buffers.clear();
self.cached_entries = Vec::new();
self.selected_entry = SelectedEntry::None;
self.pinned = false;
@@ -3225,23 +3179,14 @@ impl OutlinePanel {
window: &mut Window,
cx: &mut Context<Self>,
) -> Option<PanelEntry> {
- let selection = editor.update(cx, |editor, cx| {
- editor
- .selections
- .newest::<language::Point>(&editor.display_snapshot(cx))
- .head()
- });
let editor_snapshot = editor.update(cx, |editor, cx| editor.snapshot(window, cx));
let multi_buffer = editor.read(cx).buffer();
let multi_buffer_snapshot = multi_buffer.read(cx).snapshot(cx);
- let (excerpt_id, buffer, _) = editor
- .read(cx)
- .buffer()
- .read(cx)
- .excerpt_containing(selection, cx)?;
- let buffer_id = buffer.read(cx).remote_id();
+ let anchor = editor.update(cx, |editor, _| editor.selections.newest_anchor().head());
+ let selection_display_point = anchor.to_display_point(&editor_snapshot);
+ let (anchor, _) = multi_buffer_snapshot.anchor_to_buffer_anchor(anchor)?;
- if editor.read(cx).is_buffer_folded(buffer_id, cx) {
+ if editor.read(cx).is_buffer_folded(anchor.buffer_id, cx) {
return self
.fs_entries
.iter()
@@ -3254,14 +3199,12 @@ impl OutlinePanel {
| FsEntry::ExternalFile(FsEntryExternalFile {
buffer_id: other_buffer_id,
..
- }) => buffer_id == *other_buffer_id,
+ }) => anchor.buffer_id == *other_buffer_id,
})
.cloned()
.map(PanelEntry::Fs);
}
- let selection_display_point = selection.to_display_point(&editor_snapshot);
-
match &self.mode {
ItemsDisplayMode::Search(search_state) => search_state
.matches
@@ -3298,32 +3241,31 @@ impl OutlinePanel {
})
}),
ItemsDisplayMode::Outline => self.outline_location(
- buffer_id,
- excerpt_id,
+ anchor,
multi_buffer_snapshot,
editor_snapshot,
selection_display_point,
+ cx,
),
}
}
fn outline_location(
&self,
- buffer_id: BufferId,
- excerpt_id: ExcerptId,
+ selection_anchor: Anchor,
multi_buffer_snapshot: editor::MultiBufferSnapshot,
editor_snapshot: editor::EditorSnapshot,
selection_display_point: DisplayPoint,
+ cx: &App,
) -> Option<PanelEntry> {
let excerpt_outlines = self
- .excerpts
- .get(&buffer_id)
- .and_then(|excerpts| excerpts.get(&excerpt_id))
+ .buffers
+ .get(&selection_anchor.buffer_id)
.into_iter()
- .flat_map(|excerpt| excerpt.iter_outlines())
+ .flat_map(|buffer| buffer.iter_outlines())
.flat_map(|outline| {
let range = multi_buffer_snapshot
- .anchor_range_in_excerpt(excerpt_id, outline.range.clone())?;
+ .buffer_anchor_range_to_anchor_range(outline.range.clone())?;
Some((
range.start.to_display_point(&editor_snapshot)
..range.end.to_display_point(&editor_snapshot),
@@ -3411,16 +3353,16 @@ impl OutlinePanel {
.cloned();
let closest_container = match outline_item {
- Some(outline) => PanelEntry::Outline(OutlineEntry::Outline(OutlineEntryOutline {
- buffer_id,
- excerpt_id,
- outline,
- })),
+ Some(outline) => PanelEntry::Outline(OutlineEntry::Outline(outline)),
None => {
self.cached_entries.iter().rev().find_map(|cached_entry| {
match &cached_entry.entry {
PanelEntry::Outline(OutlineEntry::Excerpt(excerpt)) => {
- if excerpt.buffer_id == buffer_id && excerpt.id == excerpt_id {
+ if excerpt.context.start.buffer_id == selection_anchor.buffer_id
+ && let Some(buffer_snapshot) =
+ self.buffer_snapshot_for_id(excerpt.context.start.buffer_id, cx)
+ && excerpt.contains(&selection_anchor, &buffer_snapshot)
+ {
Some(cached_entry.entry.clone())
} else {
None
@@ -3430,6 +3372,7 @@ impl OutlinePanel {
FsEntry::ExternalFile(FsEntryExternalFile {
buffer_id: file_buffer_id,
excerpts: file_excerpts,
+ ..
})
| FsEntry::File(FsEntryFile {
buffer_id: file_buffer_id,
@@ -3437,7 +3380,13 @@ impl OutlinePanel {
..
}),
) => {
- if file_buffer_id == &buffer_id && file_excerpts.contains(&excerpt_id) {
+ if *file_buffer_id == selection_anchor.buffer_id
+ && let Some(buffer_snapshot) =
+ self.buffer_snapshot_for_id(*file_buffer_id, cx)
+ && file_excerpts.iter().any(|excerpt| {
+ excerpt.contains(&selection_anchor, &buffer_snapshot)
+ })
+ {
Some(cached_entry.entry.clone())
} else {
None
@@ -3452,18 +3401,17 @@ impl OutlinePanel {
}
fn fetch_outdated_outlines(&mut self, window: &mut Window, cx: &mut Context<Self>) {
- let excerpt_fetch_ranges = self.excerpt_fetch_ranges(cx);
- if excerpt_fetch_ranges.is_empty() {
+ let buffers_to_fetch = self.buffers_to_fetch();
+ if buffers_to_fetch.is_empty() {
return;
}
let first_update = Arc::new(AtomicBool::new(true));
- for (buffer_id, (_buffer_snapshot, excerpt_ranges)) in excerpt_fetch_ranges {
+ for buffer_id in buffers_to_fetch {
let outline_task = self.active_editor().map(|editor| {
editor.update(cx, |editor, cx| editor.buffer_outline_items(buffer_id, cx))
});
- let excerpt_ids = excerpt_ranges.keys().copied().collect::<Vec<_>>();
let first_update = first_update.clone();
self.outline_fetch_tasks.insert(
@@ -3498,40 +3446,26 @@ impl OutlinePanel {
Some(UPDATE_DEBOUNCE)
};
- for excerpt_id in &excerpt_ids {
- if let Some(excerpt) = outline_panel
- .excerpts
- .entry(buffer_id)
- .or_default()
- .get_mut(excerpt_id)
+ if let Some(buffer) = outline_panel.buffers.get_mut(&buffer_id) {
+ buffer.outlines = OutlineState::Outlines(fetched_outlines.clone());
+
+ if let Some(default_depth) = pending_default_depth
+ && let OutlineState::Outlines(outlines) = &buffer.outlines
{
- excerpt.outlines =
- ExcerptOutlines::Outlines(fetched_outlines.clone());
-
- if let Some(default_depth) = pending_default_depth
- && let ExcerptOutlines::Outlines(outlines) =
- &excerpt.outlines
- {
- outlines
- .iter()
- .filter(|outline| {
- (default_depth == 0
- || outline.depth >= default_depth)
- && outlines_with_children.contains(&(
- outline.range.clone(),
- outline.depth,
- ))
- })
- .for_each(|outline| {
- outline_panel.collapsed_entries.insert(
- CollapsedEntry::Outline(
- buffer_id,
- *excerpt_id,
- outline.range.clone(),
- ),
- );
- });
- }
+ outlines
+ .iter()
+ .filter(|outline| {
+ (default_depth == 0 || outline.depth >= default_depth)
+ && outlines_with_children.contains(&(
+ outline.range.clone(),
+ outline.depth,
+ ))
+ })
+ .for_each(|outline| {
+ outline_panel.collapsed_entries.insert(
+ CollapsedEntry::Outline(outline.range.clone()),
+ );
+ });
}
}
@@ -121,6 +121,9 @@ pub trait PickerDelegate: Sized + 'static {
) -> bool {
true
}
+ fn select_on_hover(&self) -> bool {
+ true
+ }
// Allows binding some optional effect to when the selection changes.
fn selected_index_changed(
@@ -788,12 +791,14 @@ impl<D: PickerDelegate> Picker<D> {
this.handle_click(ix, event.modifiers.platform, window, cx)
}),
)
- .on_hover(cx.listener(move |this, hovered: &bool, window, cx| {
- if *hovered {
- this.set_selected_index(ix, None, false, window, cx);
- cx.notify();
- }
- }))
+ .when(self.delegate.select_on_hover(), |this| {
+ this.on_hover(cx.listener(move |this, hovered: &bool, window, cx| {
+ if *hovered {
+ this.set_selected_index(ix, None, false, window, cx);
+ cx.notify();
+ }
+ }))
+ })
.children(self.delegate.render_match(
ix,
ix == self.delegate.selected_index(),
@@ -98,6 +98,7 @@ watch.workspace = true
wax.workspace = true
which.workspace = true
worktree.workspace = true
+zed_credentials_provider.workspace = true
zeroize.workspace = true
zlog.workspace = true
ztracing.workspace = true
@@ -684,7 +684,7 @@ impl ContextServerStore {
let server_url = url.clone();
let id = id.clone();
cx.spawn(async move |_this, cx| {
- let credentials_provider = cx.update(|cx| <dyn CredentialsProvider>::global(cx));
+ let credentials_provider = cx.update(|cx| zed_credentials_provider::global(cx));
if let Err(err) = Self::clear_session(&credentials_provider, &server_url, &cx).await
{
log::warn!("{} failed to clear OAuth session on removal: {}", id, err);
@@ -797,8 +797,7 @@ impl ContextServerStore {
if configuration.has_static_auth_header() {
None
} else {
- let credentials_provider =
- cx.update(|cx| <dyn CredentialsProvider>::global(cx));
+ let credentials_provider = cx.update(|cx| zed_credentials_provider::global(cx));
let http_client = cx.update(|cx| cx.http_client());
match Self::load_session(&credentials_provider, url, &cx).await {
@@ -1070,7 +1069,7 @@ impl ContextServerStore {
.context("Failed to start OAuth callback server")?;
let http_client = cx.update(|cx| cx.http_client());
- let credentials_provider = cx.update(|cx| <dyn CredentialsProvider>::global(cx));
+ let credentials_provider = cx.update(|cx| zed_credentials_provider::global(cx));
let server_url = match configuration.as_ref() {
ContextServerConfiguration::Http { url, .. } => url.clone(),
_ => anyhow::bail!("OAuth authentication only supported for HTTP servers"),
@@ -1233,7 +1232,7 @@ impl ContextServerStore {
self.stop_server(&id, cx)?;
cx.spawn(async move |this, cx| {
- let credentials_provider = cx.update(|cx| <dyn CredentialsProvider>::global(cx));
+ let credentials_provider = cx.update(|cx| zed_credentials_provider::global(cx));
if let Err(err) = Self::clear_session(&credentials_provider, &server_url, &cx).await {
log::error!("{} failed to clear OAuth session: {}", id, err);
}
@@ -1451,7 +1450,7 @@ async fn resolve_start_failure(
// (e.g. timeout because the server rejected the token silently). Clear it
// so the next start attempt can get a clean 401 and trigger the auth flow.
if www_authenticate.is_none() {
- let credentials_provider = cx.update(|cx| <dyn CredentialsProvider>::global(cx));
+ let credentials_provider = cx.update(|cx| zed_credentials_provider::global(cx));
match ContextServerStore::load_session(&credentials_provider, &server_url, cx).await {
Ok(Some(_)) => {
log::info!("{id} start failed with a cached OAuth session present; clearing it");
@@ -287,6 +287,7 @@ pub struct RepositorySnapshot {
pub original_repo_abs_path: Arc<Path>,
pub path_style: PathStyle,
pub branch: Option<Branch>,
+ pub branch_list: Arc<[Branch]>,
pub head_commit: Option<CommitDetails>,
pub scan_id: u64,
pub merge: MergeDetails,
@@ -328,6 +329,12 @@ pub struct GraphDataResponse<'a> {
pub error: Option<SharedString>,
}
+#[derive(Clone, Debug)]
+enum CreateWorktreeStartPoint {
+ Detached,
+ Branched { name: String },
+}
+
pub struct Repository {
this: WeakEntity<Self>,
snapshot: RepositorySnapshot,
@@ -428,7 +435,8 @@ pub enum GitGraphEvent {
#[derive(Clone, Debug, PartialEq, Eq)]
pub enum RepositoryEvent {
StatusesChanged,
- BranchChanged,
+ HeadChanged,
+ BranchListChanged,
StashEntriesChanged,
GitWorktreeListChanged,
PendingOpsChanged { pending_ops: SumTree<PendingOps> },
@@ -560,6 +568,10 @@ impl GitStore {
client.add_entity_request_handler(Self::handle_run_hook);
client.add_entity_request_handler(Self::handle_reset);
client.add_entity_request_handler(Self::handle_show);
+ client.add_entity_request_handler(Self::handle_create_checkpoint);
+ client.add_entity_request_handler(Self::handle_restore_checkpoint);
+ client.add_entity_request_handler(Self::handle_compare_checkpoints);
+ client.add_entity_request_handler(Self::handle_diff_checkpoints);
client.add_entity_request_handler(Self::handle_load_commit_diff);
client.add_entity_request_handler(Self::handle_file_history);
client.add_entity_request_handler(Self::handle_checkout_files);
@@ -582,6 +594,7 @@ impl GitStore {
client.add_entity_request_handler(Self::handle_create_worktree);
client.add_entity_request_handler(Self::handle_remove_worktree);
client.add_entity_request_handler(Self::handle_rename_worktree);
+ client.add_entity_request_handler(Self::handle_get_head_sha);
}
pub fn is_local(&self) -> bool {
@@ -1799,6 +1812,26 @@ impl GitStore {
&self.repositories
}
+ /// Returns the original (main) repository working directory for the given worktree.
+ /// For normal checkouts this equals the worktree's own path; for linked
+ /// worktrees it points back to the original repo.
+ pub fn original_repo_path_for_worktree(
+ &self,
+ worktree_id: WorktreeId,
+ cx: &App,
+ ) -> Option<Arc<Path>> {
+ self.active_repo_id
+ .iter()
+ .chain(self.worktree_ids.keys())
+ .find(|repo_id| {
+ self.worktree_ids
+ .get(repo_id)
+ .is_some_and(|ids| ids.contains(&worktree_id))
+ })
+ .and_then(|repo_id| self.repositories.get(repo_id))
+ .map(|repo| repo.read(cx).snapshot().original_repo_abs_path)
+ }
+
pub fn status_for_buffer_id(&self, buffer_id: BufferId, cx: &App) -> Option<FileStatus> {
let (repo, path) = self.repository_and_path_for_buffer_id(buffer_id, cx)?;
let status = repo.read(cx).snapshot.status_for_path(&path)?;
@@ -2314,6 +2347,7 @@ impl GitStore {
CommitOptions {
amend: options.amend,
signoff: options.signoff,
+ allow_empty: options.allow_empty,
},
askpass,
cx,
@@ -2380,12 +2414,18 @@ impl GitStore {
let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
let directory = PathBuf::from(envelope.payload.directory);
- let name = envelope.payload.name;
+ let start_point = if envelope.payload.name.is_empty() {
+ CreateWorktreeStartPoint::Detached
+ } else {
+ CreateWorktreeStartPoint::Branched {
+ name: envelope.payload.name,
+ }
+ };
let commit = envelope.payload.commit;
repository_handle
.update(&mut cx, |repository_handle, _| {
- repository_handle.create_worktree(name, directory, commit)
+ repository_handle.create_worktree_with_start_point(start_point, directory, commit)
})
.await??;
@@ -2430,6 +2470,21 @@ impl GitStore {
Ok(proto::Ack {})
}
+ async fn handle_get_head_sha(
+ this: Entity<Self>,
+ envelope: TypedEnvelope<proto::GitGetHeadSha>,
+ mut cx: AsyncApp,
+ ) -> Result<proto::GitGetHeadShaResponse> {
+ let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
+ let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
+
+ let head_sha = repository_handle
+ .update(&mut cx, |repository_handle, _| repository_handle.head_sha())
+ .await??;
+
+ Ok(proto::GitGetHeadShaResponse { sha: head_sha })
+ }
+
async fn handle_get_branches(
this: Entity<Self>,
envelope: TypedEnvelope<proto::GitGetBranches>,
@@ -2599,6 +2654,92 @@ impl GitStore {
})
}
+ async fn handle_create_checkpoint(
+ this: Entity<Self>,
+ envelope: TypedEnvelope<proto::GitCreateCheckpoint>,
+ mut cx: AsyncApp,
+ ) -> Result<proto::GitCreateCheckpointResponse> {
+ let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
+ let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
+
+ let checkpoint = repository_handle
+ .update(&mut cx, |repository, _| repository.checkpoint())
+ .await??;
+
+ Ok(proto::GitCreateCheckpointResponse {
+ commit_sha: checkpoint.commit_sha.as_bytes().to_vec(),
+ })
+ }
+
+ async fn handle_restore_checkpoint(
+ this: Entity<Self>,
+ envelope: TypedEnvelope<proto::GitRestoreCheckpoint>,
+ mut cx: AsyncApp,
+ ) -> Result<proto::Ack> {
+ let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
+ let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
+
+ let checkpoint = GitRepositoryCheckpoint {
+ commit_sha: Oid::from_bytes(&envelope.payload.commit_sha)?,
+ };
+
+ repository_handle
+ .update(&mut cx, |repository, _| {
+ repository.restore_checkpoint(checkpoint)
+ })
+ .await??;
+
+ Ok(proto::Ack {})
+ }
+
+ async fn handle_compare_checkpoints(
+ this: Entity<Self>,
+ envelope: TypedEnvelope<proto::GitCompareCheckpoints>,
+ mut cx: AsyncApp,
+ ) -> Result<proto::GitCompareCheckpointsResponse> {
+ let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
+ let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
+
+ let left = GitRepositoryCheckpoint {
+ commit_sha: Oid::from_bytes(&envelope.payload.left_commit_sha)?,
+ };
+ let right = GitRepositoryCheckpoint {
+ commit_sha: Oid::from_bytes(&envelope.payload.right_commit_sha)?,
+ };
+
+ let equal = repository_handle
+ .update(&mut cx, |repository, _| {
+ repository.compare_checkpoints(left, right)
+ })
+ .await??;
+
+ Ok(proto::GitCompareCheckpointsResponse { equal })
+ }
+
+ async fn handle_diff_checkpoints(
+ this: Entity<Self>,
+ envelope: TypedEnvelope<proto::GitDiffCheckpoints>,
+ mut cx: AsyncApp,
+ ) -> Result<proto::GitDiffCheckpointsResponse> {
+ let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
+ let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
+
+ let base = GitRepositoryCheckpoint {
+ commit_sha: Oid::from_bytes(&envelope.payload.base_commit_sha)?,
+ };
+ let target = GitRepositoryCheckpoint {
+ commit_sha: Oid::from_bytes(&envelope.payload.target_commit_sha)?,
+ };
+
+ let diff = repository_handle
+ .update(&mut cx, |repository, _| {
+ repository.diff_checkpoints(base, target)
+ })
+ .await??;
+
+ Ok(proto::GitDiffCheckpointsResponse { diff })
+ }
+
async fn handle_load_commit_diff(
this: Entity<Self>,
envelope: TypedEnvelope<proto::LoadCommitDiff>,
@@ -3576,6 +3717,7 @@ impl RepositorySnapshot {
.unwrap_or_else(|| work_directory_abs_path.clone()),
work_directory_abs_path,
branch: None,
+ branch_list: Arc::from([]),
head_commit: None,
scan_id: 0,
merge: Default::default(),
@@ -3938,11 +4080,17 @@ impl Repository {
.shared();
cx.subscribe_self(move |this, event: &RepositoryEvent, _| match event {
- RepositoryEvent::BranchChanged => {
+ RepositoryEvent::HeadChanged | RepositoryEvent::BranchListChanged => {
if this.scan_id > 1 {
this.initial_graph_data.clear();
}
}
+ RepositoryEvent::StashEntriesChanged => {
+ if this.scan_id > 1 {
+ this.initial_graph_data
+ .retain(|(log_source, _), _| *log_source != LogSource::All);
+ }
+ }
_ => {}
})
.detach();
@@ -4705,12 +4853,11 @@ impl Repository {
.commit_oid_to_index
.insert(commit_data.sha, graph_data.commit_data.len());
graph_data.commit_data.push(commit_data);
-
- cx.emit(RepositoryEvent::GraphEvent(
- graph_data_key.clone(),
- GitGraphEvent::CountUpdated(graph_data.commit_data.len()),
- ));
}
+ cx.emit(RepositoryEvent::GraphEvent(
+ graph_data_key.clone(),
+ GitGraphEvent::CountUpdated(graph_data.commit_data.len()),
+ ));
});
match &graph_data {
@@ -5375,6 +5522,7 @@ impl Repository {
options: Some(proto::commit::CommitOptions {
amend: options.amend,
signoff: options.signoff,
+ allow_empty: options.allow_empty,
}),
askpass_id,
})
@@ -5485,7 +5633,7 @@ impl Repository {
log::info!("head branch after scan is {branch:?}");
let snapshot = this.update(&mut cx, |this, cx| {
this.snapshot.branch = branch;
- cx.emit(RepositoryEvent::BranchChanged);
+ cx.emit(RepositoryEvent::HeadChanged);
this.snapshot.clone()
})?;
if let Some(updates_tx) = updates_tx {
@@ -5856,36 +6004,174 @@ impl Repository {
})
}
+ fn create_worktree_with_start_point(
+ &mut self,
+ start_point: CreateWorktreeStartPoint,
+ path: PathBuf,
+ commit: Option<String>,
+ ) -> oneshot::Receiver<Result<()>> {
+ if matches!(
+ &start_point,
+ CreateWorktreeStartPoint::Branched { name } if name.is_empty()
+ ) {
+ let (sender, receiver) = oneshot::channel();
+ sender
+ .send(Err(anyhow!("branch name cannot be empty")))
+ .ok();
+ return receiver;
+ }
+
+ let id = self.id;
+ let message = match &start_point {
+ CreateWorktreeStartPoint::Detached => "git worktree add (detached)".into(),
+ CreateWorktreeStartPoint::Branched { name } => {
+ format!("git worktree add: {name}").into()
+ }
+ };
+
+ self.send_job(Some(message), move |repo, _cx| async move {
+ let branch_name = match start_point {
+ CreateWorktreeStartPoint::Detached => None,
+ CreateWorktreeStartPoint::Branched { name } => Some(name),
+ };
+ let remote_name = branch_name.clone().unwrap_or_default();
+
+ match repo {
+ RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
+ backend.create_worktree(branch_name, path, commit).await
+ }
+ RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
+ client
+ .request(proto::GitCreateWorktree {
+ project_id: project_id.0,
+ repository_id: id.to_proto(),
+ name: remote_name,
+ directory: path.to_string_lossy().to_string(),
+ commit,
+ })
+ .await?;
+
+ Ok(())
+ }
+ }
+ })
+ }
+
pub fn create_worktree(
&mut self,
branch_name: String,
path: PathBuf,
commit: Option<String>,
) -> oneshot::Receiver<Result<()>> {
+ self.create_worktree_with_start_point(
+ CreateWorktreeStartPoint::Branched { name: branch_name },
+ path,
+ commit,
+ )
+ }
+
+ pub fn create_worktree_detached(
+ &mut self,
+ path: PathBuf,
+ commit: String,
+ ) -> oneshot::Receiver<Result<()>> {
+ self.create_worktree_with_start_point(
+ CreateWorktreeStartPoint::Detached,
+ path,
+ Some(commit),
+ )
+ }
+
+ pub fn head_sha(&mut self) -> oneshot::Receiver<Result<Option<String>>> {
let id = self.id;
- self.send_job(
- Some(format!("git worktree add: {}", branch_name).into()),
- move |repo, _cx| async move {
- match repo {
- RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
- backend.create_worktree(branch_name, path, commit).await
- }
- RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
- client
- .request(proto::GitCreateWorktree {
- project_id: project_id.0,
- repository_id: id.to_proto(),
- name: branch_name,
- directory: path.to_string_lossy().to_string(),
- commit,
- })
- .await?;
+ self.send_job(None, move |repo, _cx| async move {
+ match repo {
+ RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
+ Ok(backend.head_sha().await)
+ }
+ RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
+ let response = client
+ .request(proto::GitGetHeadSha {
+ project_id: project_id.0,
+ repository_id: id.to_proto(),
+ })
+ .await?;
- Ok(())
- }
+ Ok(response.sha)
}
- },
- )
+ }
+ })
+ }
+
+ pub fn update_ref(
+ &mut self,
+ ref_name: String,
+ commit: String,
+ ) -> oneshot::Receiver<Result<()>> {
+ self.send_job(None, move |repo, _cx| async move {
+ match repo {
+ RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
+ backend.update_ref(ref_name, commit).await
+ }
+ RepositoryState::Remote(_) => {
+ anyhow::bail!("update_ref is not supported for remote repositories")
+ }
+ }
+ })
+ }
+
+ pub fn delete_ref(&mut self, ref_name: String) -> oneshot::Receiver<Result<()>> {
+ self.send_job(None, move |repo, _cx| async move {
+ match repo {
+ RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
+ backend.delete_ref(ref_name).await
+ }
+ RepositoryState::Remote(_) => {
+ anyhow::bail!("delete_ref is not supported for remote repositories")
+ }
+ }
+ })
+ }
+
+ pub fn resolve_commit(&mut self, sha: String) -> oneshot::Receiver<Result<bool>> {
+ self.send_job(None, move |repo, _cx| async move {
+ match repo {
+ RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
+ let results = backend.revparse_batch(vec![sha]).await?;
+ Ok(results.into_iter().next().flatten().is_some())
+ }
+ RepositoryState::Remote(_) => {
+ anyhow::bail!("resolve_commit is not supported for remote repositories")
+ }
+ }
+ })
+ }
+
+ pub fn repair_worktrees(&mut self) -> oneshot::Receiver<Result<()>> {
+ self.send_job(None, move |repo, _cx| async move {
+ match repo {
+ RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
+ backend.repair_worktrees().await
+ }
+ RepositoryState::Remote(_) => {
+ anyhow::bail!("repair_worktrees is not supported for remote repositories")
+ }
+ }
+ })
+ }
+
+ pub fn commit_exists(&mut self, sha: String) -> oneshot::Receiver<Result<bool>> {
+ self.send_job(None, move |repo, _cx| async move {
+ match repo {
+ RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
+ let results = backend.revparse_batch(vec![sha]).await?;
+ Ok(results.into_iter().next().flatten().is_some())
+ }
+ RepositoryState::Remote(_) => {
+ anyhow::bail!("commit_exists is not supported for remote repositories")
+ }
+ }
+ })
}
pub fn remove_worktree(&mut self, path: PathBuf, force: bool) -> oneshot::Receiver<Result<()>> {
@@ -6210,12 +6496,24 @@ impl Repository {
}
pub fn checkpoint(&mut self) -> oneshot::Receiver<Result<GitRepositoryCheckpoint>> {
- self.send_job(None, |repo, _cx| async move {
+ let id = self.id;
+ self.send_job(None, move |repo, _cx| async move {
match repo {
RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
backend.checkpoint().await
}
- RepositoryState::Remote(..) => anyhow::bail!("not implemented yet"),
+ RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
+ let response = client
+ .request(proto::GitCreateCheckpoint {
+ project_id: project_id.0,
+ repository_id: id.to_proto(),
+ })
+ .await?;
+
+ Ok(GitRepositoryCheckpoint {
+ commit_sha: Oid::from_bytes(&response.commit_sha)?,
+ })
+ }
}
})
}
@@ -6224,12 +6522,22 @@ impl Repository {
&mut self,
checkpoint: GitRepositoryCheckpoint,
) -> oneshot::Receiver<Result<()>> {
+ let id = self.id;
self.send_job(None, move |repo, _cx| async move {
match repo {
RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
backend.restore_checkpoint(checkpoint).await
}
- RepositoryState::Remote { .. } => anyhow::bail!("not implemented yet"),
+ RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
+ client
+ .request(proto::GitRestoreCheckpoint {
+ project_id: project_id.0,
+ repository_id: id.to_proto(),
+ commit_sha: checkpoint.commit_sha.as_bytes().to_vec(),
+ })
+ .await?;
+ Ok(())
+ }
}
})
}
@@ -6249,7 +6557,7 @@ impl Repository {
.as_ref()
.map(proto_to_commit_details);
if self.snapshot.branch != new_branch || self.snapshot.head_commit != new_head_commit {
- cx.emit(RepositoryEvent::BranchChanged)
+ cx.emit(RepositoryEvent::HeadChanged)
}
self.snapshot.branch = new_branch;
self.snapshot.head_commit = new_head_commit;
@@ -6323,12 +6631,23 @@ impl Repository {
left: GitRepositoryCheckpoint,
right: GitRepositoryCheckpoint,
) -> oneshot::Receiver<Result<bool>> {
+ let id = self.id;
self.send_job(None, move |repo, _cx| async move {
match repo {
RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
backend.compare_checkpoints(left, right).await
}
- RepositoryState::Remote { .. } => anyhow::bail!("not implemented yet"),
+ RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
+ let response = client
+ .request(proto::GitCompareCheckpoints {
+ project_id: project_id.0,
+ repository_id: id.to_proto(),
+ left_commit_sha: left.commit_sha.as_bytes().to_vec(),
+ right_commit_sha: right.commit_sha.as_bytes().to_vec(),
+ })
+ .await?;
+ Ok(response.equal)
+ }
}
})
}
@@ -6338,6 +6657,7 @@ impl Repository {
base_checkpoint: GitRepositoryCheckpoint,
target_checkpoint: GitRepositoryCheckpoint,
) -> oneshot::Receiver<Result<String>> {
+ let id = self.id;
self.send_job(None, move |repo, _cx| async move {
match repo {
RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
@@ -6345,7 +6665,17 @@ impl Repository {
.diff_checkpoints(base_checkpoint, target_checkpoint)
.await
}
- RepositoryState::Remote { .. } => anyhow::bail!("not implemented yet"),
+ RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
+ let response = client
+ .request(proto::GitDiffCheckpoints {
+ project_id: project_id.0,
+ repository_id: id.to_proto(),
+ base_commit_sha: base_checkpoint.commit_sha.as_bytes().to_vec(),
+ target_commit_sha: target_checkpoint.commit_sha.as_bytes().to_vec(),
+ })
+ .await?;
+ Ok(response.diff)
+ }
}
})
}
@@ -6428,7 +6758,7 @@ impl Repository {
let state = RepositoryState::Local(state);
let mut jobs = VecDeque::new();
loop {
- while let Ok(Some(next_job)) = job_rx.try_next() {
+ while let Ok(next_job) = job_rx.try_recv() {
jobs.push_back(next_job);
}
@@ -6464,7 +6794,7 @@ impl Repository {
let state = RepositoryState::Remote(state);
let mut jobs = VecDeque::new();
loop {
- while let Ok(Some(next_job)) = job_rx.try_next() {
+ while let Ok(next_job) = job_rx.try_recv() {
jobs.push_back(next_job);
}
@@ -7165,7 +7495,8 @@ async fn compute_snapshot(
}
})
.await?;
- let branch = branches.into_iter().find(|branch| branch.is_head);
+ let branch = branches.iter().find(|branch| branch.is_head).cloned();
+ let branch_list: Arc<[Branch]> = branches.into();
let linked_worktrees: Arc<[GitWorktree]> = all_worktrees
.into_iter()
@@ -7188,14 +7519,16 @@ async fn compute_snapshot(
.await?;
let snapshot = this.update(cx, |this, cx| {
- let branch_changed =
+ let head_changed =
branch != this.snapshot.branch || head_commit != this.snapshot.head_commit;
+ let branch_list_changed = *branch_list != *this.snapshot.branch_list;
let worktrees_changed = *linked_worktrees != *this.snapshot.linked_worktrees;
this.snapshot = RepositorySnapshot {
id,
work_directory_abs_path,
branch,
+ branch_list: branch_list.clone(),
head_commit,
remote_origin_url,
remote_upstream_url,
@@ -7204,8 +7537,12 @@ async fn compute_snapshot(
..prev_snapshot
};
- if branch_changed {
- cx.emit(RepositoryEvent::BranchChanged);
+ if head_changed {
+ cx.emit(RepositoryEvent::HeadChanged);
+ }
+
+ if branch_list_changed {
+ cx.emit(RepositoryEvent::BranchListChanged);
}
if worktrees_changed {
@@ -70,7 +70,7 @@ impl BranchDiff {
}
GitStoreEvent::RepositoryUpdated(
event_repo_id,
- RepositoryEvent::StatusesChanged | RepositoryEvent::BranchChanged,
+ RepositoryEvent::StatusesChanged | RepositoryEvent::HeadChanged,
_,
) => this
.repo
@@ -4414,7 +4414,8 @@ impl LspStore {
}
worktree::Event::UpdatedGitRepositories(_)
| worktree::Event::DeletedEntry(_)
- | worktree::Event::Deleted => {}
+ | worktree::Event::Deleted
+ | worktree::Event::UpdatedRootRepoCommonDir => {}
})
.detach()
}
@@ -585,7 +585,7 @@ async fn raw_to_buffer_semantic_tokens(
}
Some(BufferSemanticToken {
- range: buffer_snapshot.anchor_range_around(start..end),
+ range: buffer_snapshot.anchor_range_inside(start..end),
token_type: token.token_type,
token_modifiers: token.token_modifiers,
})
@@ -59,7 +59,7 @@ impl WorktreeRoots {
let path = TriePath::from(entry.path.as_ref());
this.roots.remove(&path);
}
- WorktreeEvent::Deleted => {}
+ WorktreeEvent::Deleted | WorktreeEvent::UpdatedRootRepoCommonDir => {}
}
}),
})
@@ -1032,6 +1032,8 @@ impl DirectoryLister {
}
}
+pub const CURRENT_PROJECT_FEATURES: &[&str] = &["new-style-anchors"];
+
#[cfg(feature = "test-support")]
pub const DEFAULT_COMPLETION_CONTEXT: CompletionContext = CompletionContext {
trigger_kind: lsp::CompletionTriggerKind::INVOKED,
@@ -1228,12 +1230,23 @@ impl Project {
)
});
+ let git_store = cx.new(|cx| {
+ GitStore::local(
+ &worktree_store,
+ buffer_store.clone(),
+ environment.clone(),
+ fs.clone(),
+ cx,
+ )
+ });
+
let task_store = cx.new(|cx| {
TaskStore::local(
buffer_store.downgrade(),
worktree_store.clone(),
toolchain_store.read(cx).as_language_toolchain_store(),
environment.clone(),
+ git_store.clone(),
cx,
)
});
@@ -1269,16 +1282,6 @@ impl Project {
)
});
- let git_store = cx.new(|cx| {
- GitStore::local(
- &worktree_store,
- buffer_store.clone(),
- environment.clone(),
- fs.clone(),
- cx,
- )
- });
-
let agent_server_store = cx.new(|cx| {
AgentServerStore::local(
node.clone(),
@@ -1413,30 +1416,6 @@ impl Project {
)
});
- let task_store = cx.new(|cx| {
- TaskStore::remote(
- buffer_store.downgrade(),
- worktree_store.clone(),
- toolchain_store.read(cx).as_language_toolchain_store(),
- remote.read(cx).proto_client(),
- REMOTE_SERVER_PROJECT_ID,
- cx,
- )
- });
-
- let settings_observer = cx.new(|cx| {
- SettingsObserver::new_remote(
- fs.clone(),
- worktree_store.clone(),
- task_store.clone(),
- Some(remote_proto.clone()),
- false,
- cx,
- )
- });
- cx.subscribe(&settings_observer, Self::on_settings_observer_event)
- .detach();
-
let context_server_store = cx.new(|cx| {
ContextServerStore::remote(
rpc::proto::REMOTE_SERVER_PROJECT_ID,
@@ -1501,6 +1480,31 @@ impl Project {
)
});
+ let task_store = cx.new(|cx| {
+ TaskStore::remote(
+ buffer_store.downgrade(),
+ worktree_store.clone(),
+ toolchain_store.read(cx).as_language_toolchain_store(),
+ remote.read(cx).proto_client(),
+ REMOTE_SERVER_PROJECT_ID,
+ git_store.clone(),
+ cx,
+ )
+ });
+
+ let settings_observer = cx.new(|cx| {
+ SettingsObserver::new_remote(
+ fs.clone(),
+ worktree_store.clone(),
+ task_store.clone(),
+ Some(remote_proto.clone()),
+ false,
+ cx,
+ )
+ });
+ cx.subscribe(&settings_observer, Self::on_settings_observer_event)
+ .detach();
+
let agent_server_store = cx.new(|_| {
AgentServerStore::remote(
REMOTE_SERVER_PROJECT_ID,
@@ -1644,6 +1648,10 @@ impl Project {
project_id: remote_id,
committer_email: committer.email,
committer_name: committer.name,
+ features: CURRENT_PROJECT_FEATURES
+ .iter()
+ .map(|s| s.to_string())
+ .collect(),
})
.await?;
Self::from_join_project_response(
@@ -1726,6 +1734,17 @@ impl Project {
)
});
+ let git_store = cx.new(|cx| {
+ GitStore::remote(
+ // In this remote case we pass None for the environment
+ &worktree_store,
+ buffer_store.clone(),
+ client.clone().into(),
+ remote_id,
+ cx,
+ )
+ });
+
let task_store = cx.new(|cx| {
if run_tasks {
TaskStore::remote(
@@ -1734,6 +1753,7 @@ impl Project {
Arc::new(EmptyToolchainStore),
client.clone().into(),
remote_id,
+ git_store.clone(),
cx,
)
} else {
@@ -1752,17 +1772,6 @@ impl Project {
)
});
- let git_store = cx.new(|cx| {
- GitStore::remote(
- // In this remote case we pass None for the environment
- &worktree_store,
- buffer_store.clone(),
- client.clone().into(),
- remote_id,
- cx,
- )
- });
-
let agent_server_store = cx.new(|_cx| AgentServerStore::collab());
let replica_id = ReplicaId::new(response.payload.replica_id as u16);
@@ -2340,6 +2349,22 @@ impl Project {
.find(|tree| tree.read(cx).root_name() == root_name)
}
+ pub fn project_group_key(&self, cx: &App) -> ProjectGroupKey {
+ let roots = self
+ .visible_worktrees(cx)
+ .map(|worktree| {
+ let snapshot = worktree.read(cx).snapshot();
+ snapshot
+ .root_repo_common_dir()
+ .and_then(|dir| Some(dir.parent()?.to_path_buf()))
+ .unwrap_or(snapshot.abs_path().to_path_buf())
+ })
+ .collect::<Vec<_>>();
+ let host = self.remote_connection_options(cx);
+ let path_list = PathList::new(&roots);
+ ProjectGroupKey::new(host, path_list)
+ }
+
#[inline]
pub fn worktree_root_names<'a>(&'a self, cx: &'a App) -> impl Iterator<Item = &'a str> {
self.visible_worktrees(cx)
@@ -4738,6 +4763,19 @@ impl Project {
});
}
+ pub fn remove_worktree_for_main_worktree_path(
+ &mut self,
+ path: impl AsRef<Path>,
+ cx: &mut Context<Self>,
+ ) {
+ let path = path.as_ref();
+ self.worktree_store.update(cx, |worktree_store, cx| {
+ if let Some(worktree) = worktree_store.worktree_for_main_worktree_path(path, cx) {
+ worktree_store.remove_worktree(worktree.read(cx).id(), cx);
+ }
+ });
+ }
+
fn add_worktree(&mut self, worktree: &Entity<Worktree>, cx: &mut Context<Self>) {
self.worktree_store.update(cx, |worktree_store, cx| {
worktree_store.add(worktree, cx);
@@ -6009,6 +6047,80 @@ impl Project {
}
}
+/// Identifies a project group by a set of paths the workspaces in this group
+/// have.
+///
+/// Paths are mapped to their main worktree path first so we can group
+/// workspaces by main repos.
+#[derive(PartialEq, Eq, Hash, Clone, Debug)]
+pub struct ProjectGroupKey {
+ paths: PathList,
+ host: Option<RemoteConnectionOptions>,
+}
+
+impl ProjectGroupKey {
+ /// Creates a new `ProjectGroupKey` with the given path list.
+ ///
+ /// The path list should point to the git main worktree paths for a project.
+ pub fn new(host: Option<RemoteConnectionOptions>, paths: PathList) -> Self {
+ Self { paths, host }
+ }
+
+ pub fn display_name(&self) -> SharedString {
+ let mut names = Vec::with_capacity(self.paths.paths().len());
+ for abs_path in self.paths.paths() {
+ if let Some(name) = abs_path.file_name() {
+ names.push(name.to_string_lossy().to_string());
+ }
+ }
+ if names.is_empty() {
+ // TODO: Can we do something better in this case?
+ "Empty Workspace".into()
+ } else {
+ names.join(", ").into()
+ }
+ }
+
+ pub fn path_list(&self) -> &PathList {
+ &self.paths
+ }
+
+ pub fn display_name_from_suffixes(
+ &self,
+ path_detail_map: &std::collections::HashMap<PathBuf, usize>,
+ ) -> SharedString {
+ let mut names = Vec::with_capacity(self.paths.paths().len());
+ for abs_path in self.paths.paths() {
+ let detail = path_detail_map.get(abs_path).copied().unwrap_or(0);
+ let suffix = path_suffix(abs_path, detail);
+ if !suffix.is_empty() {
+ names.push(suffix);
+ }
+ }
+ if names.is_empty() {
+ "Empty Workspace".into()
+ } else {
+ names.join(", ").into()
+ }
+ }
+
+ pub fn host(&self) -> Option<RemoteConnectionOptions> {
+ self.host.clone()
+ }
+}
+
+pub fn path_suffix(path: &Path, detail: usize) -> String {
+ let components: Vec<_> = path
+ .components()
+ .filter_map(|component| match component {
+ std::path::Component::Normal(s) => Some(s.to_string_lossy()),
+ _ => None,
+ })
+ .collect();
+ let start = components.len().saturating_sub(detail + 1);
+ components[start..].join("/")
+}
+
pub struct PathMatchCandidateSet {
pub snapshot: Snapshot,
pub include_ignored: bool,
@@ -620,4 +620,56 @@ impl SearchQuery {
Self::Text { .. } => None,
}
}
+
+ pub fn search_str(&self, text: &str) -> Vec<Range<usize>> {
+ if self.as_str().is_empty() {
+ return Vec::new();
+ }
+
+ let is_word_char = |c: char| c.is_alphanumeric() || c == '_';
+
+ let mut matches = Vec::new();
+ match self {
+ Self::Text {
+ search, whole_word, ..
+ } => {
+ for mat in search.find_iter(text.as_bytes()) {
+ if *whole_word {
+ let prev_char = text[..mat.start()].chars().last();
+ let next_char = text[mat.end()..].chars().next();
+ if prev_char.is_some_and(&is_word_char)
+ || next_char.is_some_and(&is_word_char)
+ {
+ continue;
+ }
+ }
+ matches.push(mat.start()..mat.end());
+ }
+ }
+ Self::Regex {
+ regex,
+ multiline,
+ one_match_per_line,
+ ..
+ } => {
+ if *multiline {
+ for mat in regex.find_iter(text).flatten() {
+ matches.push(mat.start()..mat.end());
+ }
+ } else {
+ let mut line_offset = 0;
+ for line in text.split('\n') {
+ for mat in regex.find_iter(line).flatten() {
+ matches.push((line_offset + mat.start())..(line_offset + mat.end()));
+ if *one_match_per_line {
+ break;
+ }
+ }
+ line_offset += line.len() + 1;
+ }
+ }
+ }
+ }
+ matches
+ }
}
@@ -21,14 +21,14 @@ use lsp::{LanguageServerId, LanguageServerName};
use paths::{debug_task_file_name, task_file_name};
use settings::{InvalidSettingsError, parse_json_with_comments};
use task::{
- DebugScenario, ResolvedTask, SharedTaskContext, TaskContext, TaskId, TaskTemplate,
+ DebugScenario, ResolvedTask, SharedTaskContext, TaskContext, TaskHook, TaskId, TaskTemplate,
TaskTemplates, TaskVariables, VariableName,
};
use text::{BufferId, Point, ToPoint};
use util::{NumericPrefixWithSuffix, ResultExt as _, post_inc, rel_path::RelPath};
use worktree::WorktreeId;
-use crate::{task_store::TaskSettingsLocation, worktree_store::WorktreeStore};
+use crate::{git_store::GitStore, task_store::TaskSettingsLocation, worktree_store::WorktreeStore};
#[derive(Clone, Debug, Default)]
pub struct DebugScenarioContext {
@@ -644,6 +644,19 @@ impl Inventory {
self.last_scheduled_tasks.retain(|(_, task)| &task.id != id);
}
+ /// Returns all task templates (worktree and global) that have at least one
+ /// hook in the provided set.
+ pub fn templates_with_hooks(
+ &self,
+ hooks: &HashSet<TaskHook>,
+ worktree: WorktreeId,
+ ) -> Vec<(TaskSourceKind, TaskTemplate)> {
+ self.worktree_templates_from_settings(worktree)
+ .chain(self.global_templates_from_settings())
+ .filter(|(_, template)| !template.hooks.is_disjoint(hooks))
+ .collect()
+ }
+
fn global_templates_from_settings(
&self,
) -> impl '_ + Iterator<Item = (TaskSourceKind, TaskTemplate)> {
@@ -918,11 +931,15 @@ fn task_variables_preference(task: &ResolvedTask) -> Reverse<usize> {
/// Applied as a base for every custom [`ContextProvider`] unless explicitly oped out.
pub struct BasicContextProvider {
worktree_store: Entity<WorktreeStore>,
+ git_store: Entity<GitStore>,
}
impl BasicContextProvider {
- pub fn new(worktree_store: Entity<WorktreeStore>) -> Self {
- Self { worktree_store }
+ pub fn new(worktree_store: Entity<WorktreeStore>, git_store: Entity<GitStore>) -> Self {
+ Self {
+ worktree_store,
+ git_store,
+ }
}
}
@@ -1002,6 +1019,19 @@ impl ContextProvider for BasicContextProvider {
}
}
+ if let Some(worktree_id) = location.buffer.read(cx).file().map(|f| f.worktree_id(cx)) {
+ if let Some(path) = self
+ .git_store
+ .read(cx)
+ .original_repo_path_for_worktree(worktree_id, cx)
+ {
+ task_variables.insert(
+ VariableName::MainGitWorktree,
+ path.to_string_lossy().into_owned(),
+ );
+ }
+ }
+
if let Some(current_file) = current_file {
let path = current_file.abs_path(cx);
if let Some(filename) = path.file_name().and_then(|f| f.to_str()) {
@@ -19,7 +19,7 @@ use util::ResultExt;
use crate::{
BasicContextProvider, Inventory, ProjectEnvironment, buffer_store::BufferStore,
- worktree_store::WorktreeStore,
+ git_store::GitStore, worktree_store::WorktreeStore,
};
// platform-dependent warning
@@ -33,6 +33,7 @@ pub struct StoreState {
task_inventory: Entity<Inventory>,
buffer_store: WeakEntity<BufferStore>,
worktree_store: Entity<WorktreeStore>,
+ git_store: Entity<GitStore>,
toolchain_store: Arc<dyn LanguageToolchainStore>,
}
@@ -163,6 +164,7 @@ impl TaskStore {
worktree_store: Entity<WorktreeStore>,
toolchain_store: Arc<dyn LanguageToolchainStore>,
environment: Entity<ProjectEnvironment>,
+ git_store: Entity<GitStore>,
cx: &mut Context<Self>,
) -> Self {
Self::Functional(StoreState {
@@ -172,6 +174,7 @@ impl TaskStore {
},
task_inventory: Inventory::new(cx),
buffer_store,
+ git_store,
toolchain_store,
worktree_store,
})
@@ -183,6 +186,7 @@ impl TaskStore {
toolchain_store: Arc<dyn LanguageToolchainStore>,
upstream_client: AnyProtoClient,
project_id: u64,
+ git_store: Entity<GitStore>,
cx: &mut Context<Self>,
) -> Self {
Self::Functional(StoreState {
@@ -192,6 +196,7 @@ impl TaskStore {
},
task_inventory: Inventory::new(cx),
buffer_store,
+ git_store,
toolchain_store,
worktree_store,
})
@@ -207,6 +212,7 @@ impl TaskStore {
TaskStore::Functional(state) => match &state.mode {
StoreMode::Local { environment, .. } => local_task_context_for_location(
state.worktree_store.clone(),
+ state.git_store.clone(),
state.toolchain_store.clone(),
environment.clone(),
captured_variables,
@@ -220,6 +226,7 @@ impl TaskStore {
*project_id,
upstream_client.clone(),
state.worktree_store.clone(),
+ state.git_store.clone(),
captured_variables,
location,
state.toolchain_store.clone(),
@@ -302,6 +309,7 @@ impl TaskStore {
fn local_task_context_for_location(
worktree_store: Entity<WorktreeStore>,
+ git_store: Entity<GitStore>,
toolchain_store: Arc<dyn LanguageToolchainStore>,
environment: Entity<ProjectEnvironment>,
captured_variables: TaskVariables,
@@ -329,7 +337,7 @@ fn local_task_context_for_location(
worktree_store.clone(),
location,
project_env.clone(),
- BasicContextProvider::new(worktree_store),
+ BasicContextProvider::new(worktree_store, git_store),
toolchain_store,
cx,
)
@@ -351,6 +359,7 @@ fn remote_task_context_for_location(
project_id: u64,
upstream_client: AnyProtoClient,
worktree_store: Entity<WorktreeStore>,
+ git_store: Entity<GitStore>,
captured_variables: TaskVariables,
location: Location,
toolchain_store: Arc<dyn LanguageToolchainStore>,
@@ -362,7 +371,7 @@ fn remote_task_context_for_location(
.update(|cx| {
let worktree_root = worktree_root(&worktree_store, &location, cx);
- BasicContextProvider::new(worktree_store).build_context(
+ BasicContextProvider::new(worktree_store, git_store).build_context(
&TaskVariables::default(),
ContextLocation {
fs: None,
@@ -812,6 +812,7 @@ impl WorktreeStore {
// The worktree root itself has been deleted (for single-file worktrees)
// The worktree will be removed via the observe_release callback
}
+ worktree::Event::UpdatedRootRepoCommonDir => {}
}
})
.detach();
@@ -849,6 +850,21 @@ impl WorktreeStore {
self.send_project_updates(cx);
}
+ pub fn worktree_for_main_worktree_path(
+ &self,
+ path: &Path,
+ cx: &App,
+ ) -> Option<Entity<Worktree>> {
+ self.visible_worktrees(cx).find(|worktree| {
+ let worktree = worktree.read(cx);
+ if let Some(common_dir) = worktree.root_repo_common_dir() {
+ common_dir.parent() == Some(path)
+ } else {
+ worktree.abs_path().as_ref() == path
+ }
+ })
+ }
+
pub fn set_worktrees_reordered(&mut self, worktrees_reordered: bool) {
self.worktrees_reordered = worktrees_reordered;
}
@@ -23,6 +23,7 @@ mod go_locator {
show_summary: true,
show_command: true,
save: SaveStrategy::default(),
+ hooks: Default::default(),
};
let scenario = locator
@@ -51,6 +52,7 @@ mod go_locator {
show_summary: true,
show_command: true,
save: SaveStrategy::default(),
+ hooks: Default::default(),
};
let scenario = locator
@@ -190,6 +192,7 @@ mod go_locator {
show_summary: true,
show_command: true,
save: SaveStrategy::default(),
+ hooks: Default::default(),
};
let scenario = locator
@@ -225,6 +228,7 @@ mod python_locator {
show_summary: false,
show_command: false,
save: task::SaveStrategy::default(),
+ hooks: Default::default(),
};
let expected_scenario = DebugScenario {
@@ -43,7 +43,7 @@ fn test_multi_len_chars_normalization() {
let mut label = CodeLabel::new(
"myElˇ (parameter) myElˇ: {\n foo: string;\n}".to_string(),
0..6,
- vec![(0..6, HighlightId(1))],
+ vec![(0..6, HighlightId::new(1))],
);
ensure_uniform_list_compatible_label(&mut label);
assert_eq!(
@@ -51,7 +51,7 @@ fn test_multi_len_chars_normalization() {
CodeLabel::new(
"myElˇ (parameter) myElˇ: { foo: string; }".to_string(),
0..6,
- vec![(0..6, HighlightId(1))],
+ vec![(0..6, HighlightId::new(1))],
)
);
}
@@ -1771,7 +1771,7 @@ async fn test_managing_language_servers(cx: &mut gpui::TestAppContext) {
DiagnosticSet::from_sorted_entries(
vec![DiagnosticEntry {
diagnostic: Default::default(),
- range: Anchor::MIN..Anchor::MAX,
+ range: Anchor::min_max_range_for_buffer(buffer.remote_id()),
}],
&buffer.snapshot(),
),
@@ -4448,7 +4448,7 @@ async fn test_definition(cx: &mut gpui::TestAppContext) {
// Assert no new language server started
cx.executor().run_until_parked();
- assert!(fake_servers.try_next().is_err());
+ assert!(fake_servers.try_recv().is_err());
assert_eq!(definitions.len(), 1);
let definition = definitions.pop().unwrap();
@@ -8525,9 +8525,10 @@ async fn test_unstaged_diff_for_buffer(cx: &mut gpui::TestAppContext) {
unstaged_diff.update(cx, |unstaged_diff, cx| {
let snapshot = buffer.read(cx).snapshot();
assert_hunks(
- unstaged_diff
- .snapshot(cx)
- .hunks_intersecting_range(Anchor::MIN..Anchor::MAX, &snapshot),
+ unstaged_diff.snapshot(cx).hunks_intersecting_range(
+ Anchor::min_max_range_for_buffer(snapshot.remote_id()),
+ &snapshot,
+ ),
&snapshot,
&unstaged_diff.base_text(cx).text(),
&[(
@@ -8616,8 +8617,10 @@ async fn test_uncommitted_diff_for_buffer(cx: &mut gpui::TestAppContext) {
diff_1.update(cx, |diff, cx| {
let snapshot = buffer_1.read(cx).snapshot();
assert_hunks(
- diff.snapshot(cx)
- .hunks_intersecting_range(Anchor::MIN..Anchor::MAX, &snapshot),
+ diff.snapshot(cx).hunks_intersecting_range(
+ Anchor::min_max_range_for_buffer(snapshot.remote_id()),
+ &snapshot,
+ ),
&snapshot,
&diff.base_text_string(cx).unwrap(),
&[
@@ -8658,8 +8661,10 @@ async fn test_uncommitted_diff_for_buffer(cx: &mut gpui::TestAppContext) {
diff_1.update(cx, |diff, cx| {
let snapshot = buffer_1.read(cx).snapshot();
assert_hunks(
- diff.snapshot(cx)
- .hunks_intersecting_range(Anchor::MIN..Anchor::MAX, &snapshot),
+ diff.snapshot(cx).hunks_intersecting_range(
+ Anchor::min_max_range_for_buffer(snapshot.remote_id()),
+ &snapshot,
+ ),
&snapshot,
&diff.base_text(cx).text(),
&[(
@@ -8688,8 +8693,10 @@ async fn test_uncommitted_diff_for_buffer(cx: &mut gpui::TestAppContext) {
diff_2.update(cx, |diff, cx| {
let snapshot = buffer_2.read(cx).snapshot();
assert_hunks(
- diff.snapshot(cx)
- .hunks_intersecting_range(Anchor::MIN..Anchor::MAX, &snapshot),
+ diff.snapshot(cx).hunks_intersecting_range(
+ Anchor::min_max_range_for_buffer(snapshot.remote_id()),
+ &snapshot,
+ ),
&snapshot,
&diff.base_text_string(cx).unwrap(),
&[(
@@ -8710,8 +8717,10 @@ async fn test_uncommitted_diff_for_buffer(cx: &mut gpui::TestAppContext) {
diff_2.update(cx, |diff, cx| {
let snapshot = buffer_2.read(cx).snapshot();
assert_hunks(
- diff.snapshot(cx)
- .hunks_intersecting_range(Anchor::MIN..Anchor::MAX, &snapshot),
+ diff.snapshot(cx).hunks_intersecting_range(
+ Anchor::min_max_range_for_buffer(snapshot.remote_id()),
+ &snapshot,
+ ),
&snapshot,
&diff.base_text_string(cx).unwrap(),
&[(
@@ -11152,7 +11161,7 @@ async fn test_odd_events_for_ignored_dirs(
assert_eq!(
repository_updates.lock().drain(..).collect::<Vec<_>>(),
vec![
- RepositoryEvent::BranchChanged,
+ RepositoryEvent::HeadChanged,
RepositoryEvent::StatusesChanged,
RepositoryEvent::StatusesChanged,
],
@@ -7126,7 +7126,7 @@ impl Render for ProjectPanel {
.workspace
.update(cx, |workspace, cx| {
workspace.open_workspace_for_paths(
- OpenMode::Replace,
+ OpenMode::Activate,
external_paths.paths().to_owned(),
window,
cx,
@@ -140,11 +140,20 @@ impl PickerDelegate for ProjectSymbolsDelegate {
);
editor.update(cx, |editor, cx| {
+ let multibuffer_snapshot = editor.buffer().read(cx).snapshot(cx);
+ let Some(buffer_snapshot) = multibuffer_snapshot.as_singleton() else {
+ return;
+ };
+ let text_anchor = buffer_snapshot.anchor_before(position);
+ let Some(anchor) = multibuffer_snapshot.anchor_in_buffer(text_anchor)
+ else {
+ return;
+ };
editor.change_selections(
SelectionEffects::scroll(Autoscroll::center()),
window,
cx,
- |s| s.select_ranges([position..position]),
+ |s| s.select_ranges([anchor..anchor]),
);
});
})?;
@@ -212,10 +212,15 @@ message Selection {
}
message EditorAnchor {
- uint64 excerpt_id = 1;
+ optional uint64 excerpt_id = 1;
Anchor anchor = 2;
}
+message PathKey {
+ optional uint64 sort_prefix = 1;
+ string path = 2;
+}
+
enum CursorShape {
CursorBar = 0;
CursorBlock = 1;
@@ -174,6 +174,7 @@ message ShareProject {
reserved 3;
bool is_ssh_project = 4;
optional bool windows_paths = 5;
+ repeated string features = 6;
}
message ShareProjectResponse {
@@ -193,6 +194,7 @@ message JoinProject {
uint64 project_id = 1;
optional string committer_email = 2;
optional string committer_name = 3;
+ repeated string features = 4;
}
message JoinProjectResponse {
@@ -204,6 +206,7 @@ message JoinProjectResponse {
repeated string language_server_capabilities = 8;
ChannelRole role = 6;
bool windows_paths = 9;
+ repeated string features = 10;
reserved 7;
}
@@ -222,6 +225,7 @@ message UpdateWorktree {
uint64 scan_id = 8;
bool is_last_update = 9;
string abs_path = 10;
+ optional string root_repo_common_dir = 11;
}
// deprecated
@@ -359,6 +363,8 @@ message UpdateView {
reserved 7;
double scroll_x = 8;
double scroll_y = 9;
+ repeated PathExcerpts updated_paths = 10;
+ repeated uint64 deleted_buffers = 11;
}
}
@@ -385,6 +391,7 @@ message View {
reserved 8;
double scroll_x = 9;
double scroll_y = 10;
+ repeated PathExcerpts path_excerpts = 11;
}
message ChannelView {
@@ -407,6 +414,19 @@ message Excerpt {
Anchor primary_end = 6;
}
+message ExcerptRange {
+ Anchor context_start = 1;
+ Anchor context_end = 2;
+ Anchor primary_start = 3;
+ Anchor primary_end = 4;
+}
+
+message PathExcerpts {
+ PathKey path_key = 1;
+ uint64 buffer_id = 2;
+ repeated ExcerptRange ranges = 3;
+}
+
message Contact {
uint64 user_id = 1;
bool online = 2;
@@ -403,6 +403,7 @@ message Commit {
message CommitOptions {
bool amend = 1;
bool signoff = 2;
+ bool allow_empty = 3;
}
}
@@ -567,6 +568,15 @@ message GitGetWorktrees {
uint64 repository_id = 2;
}
+message GitGetHeadSha {
+ uint64 project_id = 1;
+ uint64 repository_id = 2;
+}
+
+message GitGetHeadShaResponse {
+ optional string sha = 1;
+}
+
message GitWorktreesResponse {
repeated Worktree worktrees = 1;
}
@@ -586,6 +596,43 @@ message GitCreateWorktree {
optional string commit = 5;
}
+message GitCreateCheckpoint {
+ uint64 project_id = 1;
+ uint64 repository_id = 2;
+}
+
+message GitCreateCheckpointResponse {
+ bytes commit_sha = 1;
+}
+
+message GitRestoreCheckpoint {
+ uint64 project_id = 1;
+ uint64 repository_id = 2;
+ bytes commit_sha = 3;
+}
+
+message GitCompareCheckpoints {
+ uint64 project_id = 1;
+ uint64 repository_id = 2;
+ bytes left_commit_sha = 3;
+ bytes right_commit_sha = 4;
+}
+
+message GitCompareCheckpointsResponse {
+ bool equal = 1;
+}
+
+message GitDiffCheckpoints {
+ uint64 project_id = 1;
+ uint64 repository_id = 2;
+ bytes base_commit_sha = 3;
+ bytes target_commit_sha = 4;
+}
+
+message GitDiffCheckpointsResponse {
+ string diff = 1;
+}
+
message GitRemoveWorktree {
uint64 project_id = 1;
uint64 repository_id = 2;
@@ -467,7 +467,16 @@ message Envelope {
SpawnKernelResponse spawn_kernel_response = 427;
KillKernel kill_kernel = 428;
GitRemoveWorktree git_remove_worktree = 431;
- GitRenameWorktree git_rename_worktree = 432; // current max
+ GitRenameWorktree git_rename_worktree = 432;
+ GitCreateCheckpoint git_create_checkpoint = 433;
+ GitCreateCheckpointResponse git_create_checkpoint_response = 434;
+ GitRestoreCheckpoint git_restore_checkpoint = 435;
+ GitCompareCheckpoints git_compare_checkpoints = 436;
+ GitCompareCheckpointsResponse git_compare_checkpoints_response = 437;
+ GitDiffCheckpoints git_diff_checkpoints = 438;
+ GitDiffCheckpointsResponse git_diff_checkpoints_response = 439;
+ GitGetHeadSha git_get_head_sha = 440;
+ GitGetHeadShaResponse git_get_head_sha_response = 441; // current max
}
reserved 87 to 88;
@@ -294,6 +294,13 @@ messages!(
(GitCommitDetails, Background),
(GitFileHistory, Background),
(GitFileHistoryResponse, Background),
+ (GitCreateCheckpoint, Background),
+ (GitCreateCheckpointResponse, Background),
+ (GitRestoreCheckpoint, Background),
+ (GitCompareCheckpoints, Background),
+ (GitCompareCheckpointsResponse, Background),
+ (GitDiffCheckpoints, Background),
+ (GitDiffCheckpointsResponse, Background),
(SetIndexText, Background),
(Push, Background),
(Fetch, Background),
@@ -344,6 +351,8 @@ messages!(
(NewExternalAgentVersionAvailable, Background),
(RemoteStarted, Background),
(GitGetWorktrees, Background),
+ (GitGetHeadSha, Background),
+ (GitGetHeadShaResponse, Background),
(GitWorktreesResponse, Background),
(GitCreateWorktree, Background),
(GitRemoveWorktree, Background),
@@ -514,6 +523,10 @@ request_messages!(
(RegisterBufferWithLanguageServers, Ack),
(GitShow, GitCommitDetails),
(GitFileHistory, GitFileHistoryResponse),
+ (GitCreateCheckpoint, GitCreateCheckpointResponse),
+ (GitRestoreCheckpoint, Ack),
+ (GitCompareCheckpoints, GitCompareCheckpointsResponse),
+ (GitDiffCheckpoints, GitDiffCheckpointsResponse),
(GitReset, Ack),
(GitDeleteBranch, Ack),
(GitCheckoutFiles, Ack),
@@ -547,6 +560,7 @@ request_messages!(
(GetContextServerCommand, ContextServerCommand),
(RemoteStarted, Ack),
(GitGetWorktrees, GitWorktreesResponse),
+ (GitGetHeadSha, GitGetHeadShaResponse),
(GitCreateWorktree, Ack),
(GitRemoveWorktree, Ack),
(GitRenameWorktree, Ack),
@@ -696,6 +710,10 @@ entity_messages!(
RegisterBufferWithLanguageServers,
GitShow,
GitFileHistory,
+ GitCreateCheckpoint,
+ GitRestoreCheckpoint,
+ GitCompareCheckpoints,
+ GitDiffCheckpoints,
GitReset,
GitDeleteBranch,
GitCheckoutFiles,
@@ -734,6 +752,7 @@ entity_messages!(
ExternalAgentLoadingStatusUpdated,
NewExternalAgentVersionAvailable,
GitGetWorktrees,
+ GitGetHeadSha,
GitCreateWorktree,
GitRemoveWorktree,
GitRenameWorktree,
@@ -866,6 +885,7 @@ pub fn split_worktree_update(mut message: UpdateWorktree) -> impl Iterator<Item
worktree_id: message.worktree_id,
root_name: message.root_name.clone(),
abs_path: message.abs_path.clone(),
+ root_repo_common_dir: message.root_repo_common_dir.clone(),
updated_entries,
removed_entries,
scan_id: message.scan_id,
@@ -30,17 +30,20 @@ fn project_devcontainer_key(project_path: &str) -> String {
}
pub fn suggest_on_worktree_updated(
+ workspace: &mut Workspace,
worktree_id: WorktreeId,
updated_entries: &UpdatedEntriesSet,
project: &gpui::Entity<Project>,
window: &mut Window,
cx: &mut Context<Workspace>,
) {
+ let cli_auto_open = workspace.open_in_dev_container();
+
let devcontainer_updated = updated_entries.iter().any(|(path, _, _)| {
path.as_ref() == devcontainer_dir_path() || path.as_ref() == devcontainer_json_path()
});
- if !devcontainer_updated {
+ if !devcontainer_updated && !cli_auto_open {
return;
}
@@ -54,7 +57,35 @@ pub fn suggest_on_worktree_updated(
return;
}
- if find_configs_in_snapshot(worktree).is_empty() {
+ let has_configs = !find_configs_in_snapshot(worktree).is_empty();
+
+ if cli_auto_open {
+ workspace.set_open_in_dev_container(false);
+ let task = cx.spawn_in(window, async move |workspace, cx| {
+ let scans_complete =
+ workspace.update(cx, |workspace, cx| workspace.worktree_scans_complete(cx))?;
+ scans_complete.await;
+
+ workspace.update_in(cx, |workspace, window, cx| {
+ let has_configs = workspace
+ .project()
+ .read(cx)
+ .worktrees(cx)
+ .any(|wt| !find_configs_in_snapshot(wt.read(cx)).is_empty());
+ if has_configs {
+ cx.on_next_frame(window, move |_workspace, window, cx| {
+ window.dispatch_action(Box::new(zed_actions::OpenDevContainer), cx);
+ });
+ } else {
+ log::warn!("--dev-container: no devcontainer configuration found in project");
+ }
+ })
+ });
+ workspace.set_dev_container_task(task);
+ return;
+ }
+
+ if !has_configs {
return;
}
@@ -52,7 +52,10 @@ use workspace::{
};
use zed_actions::{OpenDevContainer, OpenRecent, OpenRemote};
-actions!(recent_projects, [ToggleActionsMenu]);
+actions!(
+ recent_projects,
+ [ToggleActionsMenu, RemoveSelected, AddToWorkspace,]
+);
#[derive(Clone, Debug)]
pub struct RecentProjectEntry {
@@ -354,7 +357,6 @@ pub fn init(cx: &mut App) {
.update(cx, |multi_workspace, window, cx| {
let sibling_workspace_ids: HashSet<WorkspaceId> = multi_workspace
.workspaces()
- .iter()
.filter_map(|ws| ws.read(cx).database_id())
.collect();
@@ -472,11 +474,12 @@ pub fn init(cx: &mut App) {
cx.subscribe_in(
workspace.project(),
window,
- move |_, project, event, window, cx| {
+ move |workspace, project, event, window, cx| {
if let project::Event::WorktreeUpdatedEntries(worktree_id, updated_entries) =
event
{
dev_container_suggest::suggest_on_worktree_updated(
+ workspace,
*worktree_id,
updated_entries,
project,
@@ -684,6 +687,79 @@ impl RecentProjects {
}
});
}
+
+ fn handle_remove_selected(
+ &mut self,
+ _: &RemoveSelected,
+ window: &mut Window,
+ cx: &mut Context<Self>,
+ ) {
+ self.picker.update(cx, |picker, cx| {
+ let ix = picker.delegate.selected_index;
+
+ match picker.delegate.filtered_entries.get(ix) {
+ Some(ProjectPickerEntry::OpenFolder { index, .. }) => {
+ if let Some(folder) = picker.delegate.open_folders.get(*index) {
+ let worktree_id = folder.worktree_id;
+ let Some(workspace) = picker.delegate.workspace.upgrade() else {
+ return;
+ };
+ workspace.update(cx, |workspace, cx| {
+ let project = workspace.project().clone();
+ project.update(cx, |project, cx| {
+ project.remove_worktree(worktree_id, cx);
+ });
+ });
+ picker.delegate.open_folders = get_open_folders(workspace.read(cx), cx);
+ let query = picker.query(cx);
+ picker.update_matches(query, window, cx);
+ }
+ }
+ Some(ProjectPickerEntry::OpenProject(hit)) => {
+ if let Some((workspace_id, ..)) =
+ picker.delegate.workspaces.get(hit.candidate_id)
+ {
+ let workspace_id = *workspace_id;
+ picker
+ .delegate
+ .remove_sibling_workspace(workspace_id, window, cx);
+ let query = picker.query(cx);
+ picker.update_matches(query, window, cx);
+ }
+ }
+ Some(ProjectPickerEntry::RecentProject(_)) => {
+ picker.delegate.delete_recent_project(ix, window, cx);
+ }
+ _ => {}
+ }
+ });
+ }
+
+ fn handle_add_to_workspace(
+ &mut self,
+ _: &AddToWorkspace,
+ window: &mut Window,
+ cx: &mut Context<Self>,
+ ) {
+ self.picker.update(cx, |picker, cx| {
+ let ix = picker.delegate.selected_index;
+
+ if let Some(ProjectPickerEntry::RecentProject(hit)) =
+ picker.delegate.filtered_entries.get(ix)
+ {
+ if let Some((_, location, paths, _)) =
+ picker.delegate.workspaces.get(hit.candidate_id)
+ {
+ if matches!(location, SerializedWorkspaceLocation::Local) {
+ let paths_to_add = paths.paths().to_vec();
+ picker
+ .delegate
+ .add_project_to_workspace(paths_to_add, window, cx);
+ }
+ }
+ }
+ });
+ }
}
impl EventEmitter<DismissEvent> for RecentProjects {}
@@ -699,6 +775,8 @@ impl Render for RecentProjects {
v_flex()
.key_context("RecentProjects")
.on_action(cx.listener(Self::handle_toggle_open_menu))
+ .on_action(cx.listener(Self::handle_remove_selected))
+ .on_action(cx.listener(Self::handle_add_to_workspace))
.w(rems(self.rem_width))
.child(self.picker.clone())
}
@@ -1034,7 +1112,6 @@ impl PickerDelegate for RecentProjectsDelegate {
.update(cx, |multi_workspace, window, cx| {
let workspace = multi_workspace
.workspaces()
- .iter()
.find(|ws| ws.read(cx).database_id() == Some(workspace_id))
.cloned();
if let Some(workspace) = workspace {
@@ -1081,7 +1158,7 @@ impl PickerDelegate for RecentProjectsDelegate {
.update(cx, |multi_workspace, window, cx| {
multi_workspace.open_project(
paths,
- OpenMode::Replace,
+ OpenMode::Activate,
window,
cx,
)
@@ -1364,7 +1441,6 @@ impl PickerDelegate for RecentProjectsDelegate {
)
}
ProjectPickerEntry::RecentProject(hit) => {
- let popover_style = matches!(self.style, ProjectPickerStyle::Popover);
let (_, location, paths, _) = self.workspaces.get(hit.candidate_id)?;
let is_local = matches!(location, SerializedWorkspaceLocation::Local);
let paths_to_add = paths.paths().to_vec();
@@ -1432,28 +1508,26 @@ impl PickerDelegate for RecentProjectsDelegate {
}),
)
})
- .when(popover_style, |this| {
- this.child(
- IconButton::new("open_new_window", IconName::ArrowUpRight)
- .icon_size(IconSize::XSmall)
- .tooltip({
- move |_, cx| {
- Tooltip::for_action_in(
- "Open Project in New Window",
- &menu::SecondaryConfirm,
- &focus_handle,
- cx,
- )
- }
- })
- .on_click(cx.listener(move |this, _event, window, cx| {
- cx.stop_propagation();
- window.prevent_default();
- this.delegate.set_selected_index(ix, window, cx);
- this.delegate.confirm(true, window, cx);
- })),
- )
- })
+ .child(
+ IconButton::new("open_new_window", IconName::ArrowUpRight)
+ .icon_size(IconSize::XSmall)
+ .tooltip({
+ move |_, cx| {
+ Tooltip::for_action_in(
+ "Open Project in New Window",
+ &menu::SecondaryConfirm,
+ &focus_handle,
+ cx,
+ )
+ }
+ })
+ .on_click(cx.listener(move |this, _event, window, cx| {
+ cx.stop_propagation();
+ window.prevent_default();
+ this.delegate.set_selected_index(ix, window, cx);
+ this.delegate.confirm(true, window, cx);
+ })),
+ )
.child(
IconButton::new("delete", IconName::Close)
.icon_size(IconSize::Small)
@@ -1518,9 +1592,7 @@ impl PickerDelegate for RecentProjectsDelegate {
.border_t_1()
.border_color(cx.theme().colors().border_variant)
.child({
- let open_action = workspace::Open {
- create_new_window: self.create_new_window,
- };
+ let open_action = workspace::Open::default();
Button::new("open_local_folder", "Open Local Project")
.key_binding(KeyBinding::for_action_in(&open_action, &focus_handle, cx))
.on_click(move |_, window, cx| {
@@ -1551,6 +1623,44 @@ impl PickerDelegate for RecentProjectsDelegate {
);
}
+ let selected_entry = self.filtered_entries.get(self.selected_index);
+
+ let secondary_footer_actions: Option<AnyElement> = match selected_entry {
+ Some(ProjectPickerEntry::OpenFolder { .. } | ProjectPickerEntry::OpenProject(_)) => {
+ let label = if matches!(selected_entry, Some(ProjectPickerEntry::OpenFolder { .. }))
+ {
+ "Remove Folder"
+ } else {
+ "Remove from Window"
+ };
+ Some(
+ Button::new("remove_selected", label)
+ .key_binding(KeyBinding::for_action_in(
+ &RemoveSelected,
+ &focus_handle,
+ cx,
+ ))
+ .on_click(|_, window, cx| {
+ window.dispatch_action(RemoveSelected.boxed_clone(), cx)
+ })
+ .into_any_element(),
+ )
+ }
+ Some(ProjectPickerEntry::RecentProject(_)) => Some(
+ Button::new("delete_recent", "Delete")
+ .key_binding(KeyBinding::for_action_in(
+ &RemoveSelected,
+ &focus_handle,
+ cx,
+ ))
+ .on_click(|_, window, cx| {
+ window.dispatch_action(RemoveSelected.boxed_clone(), cx)
+ })
+ .into_any_element(),
+ ),
+ _ => None,
+ };
+
Some(
h_flex()
.flex_1()
@@ -1559,6 +1669,9 @@ impl PickerDelegate for RecentProjectsDelegate {
.justify_end()
.border_t_1()
.border_color(cx.theme().colors().border_variant)
+ .when_some(secondary_footer_actions, |this, actions| {
+ this.child(actions)
+ })
.map(|this| {
if is_already_open_entry {
this.child(
@@ -1607,7 +1720,7 @@ impl PickerDelegate for RecentProjectsDelegate {
y: px(-2.0),
})
.trigger(
- Button::new("actions-trigger", "Actions…")
+ Button::new("actions-trigger", "Actions")
.selected_style(ButtonStyle::Tinted(TintColor::Accent))
.key_binding(KeyBinding::for_action_in(
&ToggleActionsMenu,
@@ -1617,16 +1730,32 @@ impl PickerDelegate for RecentProjectsDelegate {
)
.menu({
let focus_handle = focus_handle.clone();
- let create_new_window = self.create_new_window;
+ let show_add_to_workspace = match selected_entry {
+ Some(ProjectPickerEntry::RecentProject(hit)) => self
+ .workspaces
+ .get(hit.candidate_id)
+ .map(|(_, loc, ..)| {
+ matches!(loc, SerializedWorkspaceLocation::Local)
+ })
+ .unwrap_or(false),
+ _ => false,
+ };
move |window, cx| {
Some(ContextMenu::build(window, cx, {
let focus_handle = focus_handle.clone();
move |menu, _, _| {
menu.context(focus_handle)
+ .when(show_add_to_workspace, |menu| {
+ menu.action(
+ "Add to Workspace",
+ AddToWorkspace.boxed_clone(),
+ )
+ .separator()
+ })
.action(
"Open Local Project",
- workspace::Open { create_new_window }.boxed_clone(),
+ workspace::Open::default().boxed_clone(),
)
.action(
"Open Remote Project",
@@ -1801,7 +1930,6 @@ impl RecentProjectsDelegate {
.update(cx, |multi_workspace, window, cx| {
let workspace = multi_workspace
.workspaces()
- .iter()
.find(|ws| ws.read(cx).database_id() == Some(workspace_id))
.cloned();
if let Some(workspace) = workspace {
@@ -1872,7 +2000,7 @@ mod tests {
use std::path::PathBuf;
use editor::Editor;
- use gpui::{TestAppContext, UpdateGlobal, WindowHandle};
+ use gpui::{TestAppContext, UpdateGlobal, VisualTestContext, WindowHandle};
use serde_json::json;
use settings::SettingsStore;
@@ -1924,6 +2052,11 @@ mod tests {
assert_eq!(cx.update(|cx| cx.windows().len()), 1);
let multi_workspace = cx.update(|cx| cx.windows()[0].downcast::<MultiWorkspace>().unwrap());
+ multi_workspace
+ .update(cx, |multi_workspace, _, cx| {
+ multi_workspace.open_sidebar(cx);
+ })
+ .unwrap();
multi_workspace
.update(cx, |multi_workspace, _, cx| {
assert!(!multi_workspace.workspace().read(cx).is_edited())
@@ -1991,14 +2124,12 @@ mod tests {
cx.dispatch_action(*multi_workspace, menu::Confirm);
cx.run_until_parked();
- // prepare_to_close triggers a save prompt for the dirty buffer.
- // Choose "Don't Save" (index 2) to discard and continue replacing.
+ // In multi-workspace mode, the dirty workspace is kept and a new one is
+ // opened alongside it — no save prompt needed.
assert!(
- cx.has_pending_prompt(),
- "Should prompt to save dirty buffer before replacing workspace"
+ !cx.has_pending_prompt(),
+ "Should not prompt in multi-workspace mode — dirty workspace is kept"
);
- cx.simulate_prompt_answer("Don't Save");
- cx.run_until_parked();
multi_workspace
.update(cx, |multi_workspace, _, cx| {
@@ -2012,8 +2143,8 @@ mod tests {
);
assert!(
- !multi_workspace.workspaces().contains(&dirty_workspace),
- "The original dirty workspace should have been replaced"
+ multi_workspace.workspaces().any(|w| w == &dirty_workspace),
+ "The dirty workspace should still be present in multi-workspace mode"
);
assert!(
@@ -2113,6 +2244,71 @@ mod tests {
.unwrap();
}
+ #[gpui::test]
+ async fn test_dev_container_modal_not_dismissed_on_backdrop_click(cx: &mut TestAppContext) {
+ let app_state = init_test(cx);
+
+ app_state
+ .fs
+ .as_fake()
+ .insert_tree(
+ path!("/project"),
+ json!({
+ ".devcontainer": {
+ "devcontainer.json": "{}"
+ },
+ "src": {
+ "main.rs": "fn main() {}"
+ }
+ }),
+ )
+ .await;
+
+ cx.update(|cx| {
+ open_paths(
+ &[PathBuf::from(path!("/project"))],
+ app_state,
+ workspace::OpenOptions::default(),
+ cx,
+ )
+ })
+ .await
+ .unwrap();
+
+ assert_eq!(cx.update(|cx| cx.windows().len()), 1);
+ let multi_workspace = cx.update(|cx| cx.windows()[0].downcast::<MultiWorkspace>().unwrap());
+
+ cx.run_until_parked();
+
+ cx.dispatch_action(*multi_workspace, OpenDevContainer);
+
+ multi_workspace
+ .update(cx, |multi_workspace, _, cx| {
+ assert!(
+ multi_workspace
+ .active_modal::<RemoteServerProjects>(cx)
+ .is_some(),
+ "Dev container modal should be open"
+ );
+ })
+ .unwrap();
+
+ // Click outside the modal (on the backdrop) to try to dismiss it
+ let mut vcx = VisualTestContext::from_window(*multi_workspace, cx);
+ vcx.simulate_click(gpui::point(px(1.0), px(1.0)), gpui::Modifiers::default());
+
+ multi_workspace
+ .update(cx, |multi_workspace, _, cx| {
+ assert!(
+ multi_workspace
+ .active_modal::<RemoteServerProjects>(cx)
+ .is_some(),
+ "Dev container modal should remain open during creation"
+ );
+ })
+ .unwrap();
+ }
+
#[gpui::test]
async fn test_open_dev_container_action_with_multiple_configs(cx: &mut TestAppContext) {
let app_state = init_test(cx);
@@ -54,7 +54,7 @@ use util::{
rel_path::RelPath,
};
use workspace::{
- AppState, ModalView, MultiWorkspace, OpenLog, OpenOptions, Toast, Workspace,
+ AppState, DismissDecision, ModalView, MultiWorkspace, OpenLog, OpenOptions, Toast, Workspace,
notifications::{DetachAndPromptErr, NotificationId},
open_remote_project_with_existing_connection,
};
@@ -69,6 +69,7 @@ pub struct RemoteServerProjects {
create_new_window: bool,
dev_container_picker: Option<Entity<Picker<DevContainerPickerDelegate>>>,
_subscription: Subscription,
+ allow_dismissal: bool,
}
struct CreateRemoteServer {
@@ -920,6 +921,7 @@ impl RemoteServerProjects {
create_new_window,
dev_container_picker: None,
_subscription,
+ allow_dismissal: true,
}
}
@@ -1140,6 +1142,7 @@ impl RemoteServerProjects {
}
fn view_in_progress_dev_container(&mut self, window: &mut Window, cx: &mut Context<Self>) {
+ self.allow_dismissal = false;
self.mode = Mode::CreateRemoteDevContainer(CreateRemoteDevContainer::new(
DevContainerCreationProgress::Creating,
cx,
@@ -1309,6 +1312,7 @@ impl RemoteServerProjects {
cx.emit(DismissEvent);
}
_ => {
+ self.allow_dismissal = true;
self.mode = Mode::default_mode(&self.ssh_config_servers, cx);
self.focus_handle(cx).focus(window, cx);
cx.notify();
@@ -1875,6 +1879,7 @@ impl RemoteServerProjects {
.ok();
entity
.update_in(cx, |remote_server_projects, window, cx| {
+ remote_server_projects.allow_dismissal = true;
remote_server_projects.mode =
Mode::CreateRemoteDevContainer(CreateRemoteDevContainer::new(
DevContainerCreationProgress::Error(format!("{e}")),
@@ -1897,7 +1902,8 @@ impl RemoteServerProjects {
.log_err();
entity
- .update(cx, |_, cx| {
+ .update(cx, |this, cx| {
+ this.allow_dismissal = true;
cx.emit(DismissEvent);
})
.log_err();
@@ -2948,7 +2954,15 @@ fn get_text(element: &Entity<Editor>, cx: &mut App) -> String {
element.read(cx).text(cx).trim().to_string()
}
-impl ModalView for RemoteServerProjects {}
+impl ModalView for RemoteServerProjects {
+ fn on_before_dismiss(
+ &mut self,
+ _window: &mut Window,
+ _cx: &mut Context<Self>,
+ ) -> DismissDecision {
+ DismissDecision::Dismiss(self.allow_dismissal)
+ }
+}
impl Focusable for RemoteServerProjects {
fn focus_handle(&self, cx: &App) -> FocusHandle {
@@ -21,6 +21,8 @@ use workspace::{
WorkspaceDb, WorkspaceId, notifications::DetachAndPromptErr,
};
+use zed_actions::OpenRemote;
+
use crate::{highlights_for_path, icon_for_remote_connection, open_remote_project};
pub struct SidebarRecentProjects {
@@ -412,13 +414,35 @@ impl PickerDelegate for SidebarRecentProjectsDelegate {
let open_action = workspace::Open {
create_new_window: false,
};
+
Button::new("open_local_folder", "Add Local Project")
.key_binding(KeyBinding::for_action_in(&open_action, &focus_handle, cx))
.on_click(cx.listener(move |_, _, window, cx| {
+ window.dispatch_action(open_action.boxed_clone(), cx);
cx.emit(DismissEvent);
- window.dispatch_action(open_action.boxed_clone(), cx)
}))
})
+ .child(
+ Button::new("open_remote_folder", "Add Remote Project")
+ .key_binding(KeyBinding::for_action(
+ &OpenRemote {
+ from_existing_connection: false,
+ create_new_window: false,
+ },
+ cx,
+ ))
+ .on_click(cx.listener(|_, _, window, cx| {
+ window.dispatch_action(
+ OpenRemote {
+ from_existing_connection: false,
+ create_new_window: false,
+ }
+ .boxed_clone(),
+ cx,
+ );
+ cx.emit(DismissEvent);
+ })),
+ )
.into_any(),
)
}
@@ -246,7 +246,7 @@ impl WslOpenModal {
false => !secondary,
};
let open_mode = if replace_current_window {
- workspace::OpenMode::Replace
+ workspace::OpenMode::Activate
} else {
workspace::OpenMode::NewWindow
};
@@ -1273,7 +1273,7 @@ impl ConnectionPool {
}
}
-#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+#[derive(Debug, Clone, PartialEq, Eq, Hash, serde::Serialize, serde::Deserialize)]
pub enum RemoteConnectionOptions {
Ssh(SshConnectionOptions),
Wsl(WslConnectionOptions),
@@ -1285,7 +1285,10 @@ pub enum RemoteConnectionOptions {
impl RemoteConnectionOptions {
pub fn display_name(&self) -> String {
match self {
- RemoteConnectionOptions::Ssh(opts) => opts.host.to_string(),
+ RemoteConnectionOptions::Ssh(opts) => opts
+ .nickname
+ .clone()
+ .unwrap_or_else(|| opts.host.to_string()),
RemoteConnectionOptions::Wsl(opts) => opts.distro_name.clone(),
RemoteConnectionOptions::Docker(opts) => {
if opts.use_podman {
@@ -1300,6 +1303,32 @@ impl RemoteConnectionOptions {
}
}
+#[cfg(test)]
+mod tests {
+ use super::*;
+
+ #[test]
+ fn test_ssh_display_name_prefers_nickname() {
+ let options = RemoteConnectionOptions::Ssh(SshConnectionOptions {
+ host: "1.2.3.4".into(),
+ nickname: Some("My Cool Project".to_string()),
+ ..Default::default()
+ });
+
+ assert_eq!(options.display_name(), "My Cool Project");
+ }
+
+ #[test]
+ fn test_ssh_display_name_falls_back_to_host() {
+ let options = RemoteConnectionOptions::Ssh(SshConnectionOptions {
+ host: "1.2.3.4".into(),
+ ..Default::default()
+ });
+
+ assert_eq!(options.display_name(), "1.2.3.4");
+ }
+}
+
impl From<SshConnectionOptions> for RemoteConnectionOptions {
fn from(opts: SshConnectionOptions) -> Self {
RemoteConnectionOptions::Ssh(opts)
@@ -30,7 +30,18 @@ use crate::{
transport::parse_platform,
};
-#[derive(Debug, Default, Clone, PartialEq, Eq, Hash)]
+#[derive(
+ Debug,
+ Default,
+ Clone,
+ PartialEq,
+ Eq,
+ Hash,
+ PartialOrd,
+ Ord,
+ serde::Serialize,
+ serde::Deserialize,
+)]
pub struct DockerConnectionOptions {
pub name: String,
pub container_id: String,
@@ -56,7 +56,7 @@ use std::{
use util::paths::{PathStyle, RemotePathBuf};
/// Unique identifier for a mock connection.
-#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+#[derive(Debug, Clone, PartialEq, Eq, Hash, serde::Serialize, serde::Deserialize)]
pub struct MockConnectionOptions {
pub id: u64,
}
@@ -45,7 +45,7 @@ pub(crate) struct SshRemoteConnection {
_temp_dir: TempDir,
}
-#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+#[derive(Debug, Clone, PartialEq, Eq, Hash, serde::Serialize, serde::Deserialize)]
pub enum SshConnectionHost {
IpAddr(IpAddr),
Hostname(String),
@@ -102,7 +102,7 @@ fn bracket_ipv6(host: &str) -> String {
}
}
-#[derive(Debug, Default, Clone, PartialEq, Eq, Hash)]
+#[derive(Debug, Default, Clone, PartialEq, Eq, Hash, serde::Serialize, serde::Deserialize)]
pub struct SshConnectionOptions {
pub host: SshConnectionHost,
pub username: Option<String>,
@@ -28,7 +28,9 @@ use util::{
shell_builder::ShellBuilder,
};
-#[derive(Debug, Clone, PartialEq, Eq, Hash, serde::Deserialize, schemars::JsonSchema)]
+#[derive(
+ Debug, Clone, PartialEq, Eq, Hash, serde::Serialize, serde::Deserialize, schemars::JsonSchema,
+)]
pub struct WslConnectionOptions {
pub distro_name: String,
pub user: Option<String>,
@@ -191,6 +191,7 @@ impl HeadlessProject {
worktree_store.clone(),
toolchain_store.read(cx).as_language_toolchain_store(),
environment.clone(),
+ git_store.clone(),
cx,
);
task_store.shared(REMOTE_SERVER_PROJECT_ID, session.clone(), cx);
@@ -11,6 +11,7 @@ use languages::rust_lang;
use extension::ExtensionHostProxy;
use fs::{FakeFs, Fs};
+use git::repository::Worktree as GitWorktree;
use gpui::{AppContext as _, Entity, SharedString, TestAppContext};
use http_client::{BlockedHttpClient, FakeHttpClient};
use language::{
@@ -1539,6 +1540,87 @@ async fn test_copy_file_into_remote_project(
);
}
+#[gpui::test]
+async fn test_remote_root_repo_common_dir(cx: &mut TestAppContext, server_cx: &mut TestAppContext) {
+ let fs = FakeFs::new(server_cx.executor());
+ fs.insert_tree(
+ "/code",
+ json!({
+ "main_repo": {
+ ".git": {},
+ "file.txt": "content",
+ },
+ "no_git": {
+ "file.txt": "content",
+ },
+ }),
+ )
+ .await;
+
+ // Create a linked worktree that points back to main_repo's .git.
+ fs.add_linked_worktree_for_repo(
+ Path::new("/code/main_repo/.git"),
+ false,
+ GitWorktree {
+ path: PathBuf::from("/code/linked_worktree"),
+ ref_name: Some("refs/heads/feature-branch".into()),
+ sha: "abc123".into(),
+ is_main: false,
+ },
+ )
+ .await;
+
+ let (project, _headless) = init_test(&fs, cx, server_cx).await;
+
+ // Main repo: root_repo_common_dir should be the .git directory itself.
+ let (worktree_main, _) = project
+ .update(cx, |project, cx| {
+ project.find_or_create_worktree("/code/main_repo", true, cx)
+ })
+ .await
+ .unwrap();
+ cx.executor().run_until_parked();
+
+ let common_dir = worktree_main.read_with(cx, |worktree, _| {
+ worktree.snapshot().root_repo_common_dir().cloned()
+ });
+ assert_eq!(
+ common_dir.as_deref(),
+ Some(Path::new("/code/main_repo/.git")),
+ );
+
+ // Linked worktree: root_repo_common_dir should point to the main repo's .git.
+ let (worktree_linked, _) = project
+ .update(cx, |project, cx| {
+ project.find_or_create_worktree("/code/linked_worktree", true, cx)
+ })
+ .await
+ .unwrap();
+ cx.executor().run_until_parked();
+
+ let common_dir = worktree_linked.read_with(cx, |worktree, _| {
+ worktree.snapshot().root_repo_common_dir().cloned()
+ });
+ assert_eq!(
+ common_dir.as_deref(),
+ Some(Path::new("/code/main_repo/.git")),
+ );
+
+ // No git repo: root_repo_common_dir should be None.
+ let (worktree_no_git, _) = project
+ .update(cx, |project, cx| {
+ project.find_or_create_worktree("/code/no_git", true, cx)
+ })
+ .await
+ .unwrap();
+ cx.executor().run_until_parked();
+
+ let common_dir = worktree_no_git.read_with(cx, |worktree, _| {
+ worktree.snapshot().root_repo_common_dir().cloned()
+ });
+ assert_eq!(common_dir, None);
+}
+
#[gpui::test]
async fn test_remote_git_diffs(cx: &mut TestAppContext, server_cx: &mut TestAppContext) {
let text_2 = "
@@ -1917,6 +1999,153 @@ async fn test_remote_git_branches(cx: &mut TestAppContext, server_cx: &mut TestA
assert_eq!(server_branch.name(), "totally-new-branch");
}
+#[gpui::test]
+async fn test_remote_git_checkpoints(cx: &mut TestAppContext, server_cx: &mut TestAppContext) {
+ let fs = FakeFs::new(server_cx.executor());
+ fs.insert_tree(
+ path!("/code"),
+ json!({
+ "project1": {
+ ".git": {},
+ "file.txt": "original content",
+ },
+ }),
+ )
+ .await;
+
+ let (project, _headless) = init_test(&fs, cx, server_cx).await;
+
+ let (_worktree, _) = project
+ .update(cx, |project, cx| {
+ project.find_or_create_worktree(path!("/code/project1"), true, cx)
+ })
+ .await
+ .unwrap();
+ cx.run_until_parked();
+
+ let repository = project.update(cx, |project, cx| project.active_repository(cx).unwrap());
+
+ // 1. Create a checkpoint of the original state
+ let checkpoint_1 = repository
+ .update(cx, |repository, _| repository.checkpoint())
+ .await
+ .unwrap()
+ .unwrap();
+
+ // 2. Modify a file on the server-side fs
+ fs.write(
+ Path::new(path!("/code/project1/file.txt")),
+ b"modified content",
+ )
+ .await
+ .unwrap();
+
+ // 3. Create a second checkpoint with the modified state
+ let checkpoint_2 = repository
+ .update(cx, |repository, _| repository.checkpoint())
+ .await
+ .unwrap()
+ .unwrap();
+
+ // 4. compare_checkpoints: same checkpoint with itself => equal
+ let equal = repository
+ .update(cx, |repository, _| {
+ repository.compare_checkpoints(checkpoint_1.clone(), checkpoint_1.clone())
+ })
+ .await
+ .unwrap()
+ .unwrap();
+ assert!(equal, "a checkpoint compared with itself should be equal");
+
+ // 5. compare_checkpoints: different states => not equal
+ let equal = repository
+ .update(cx, |repository, _| {
+ repository.compare_checkpoints(checkpoint_1.clone(), checkpoint_2.clone())
+ })
+ .await
+ .unwrap()
+ .unwrap();
+ assert!(
+ !equal,
+ "checkpoints of different states should not be equal"
+ );
+
+ // 6. diff_checkpoints: same checkpoint => empty diff
+ let diff = repository
+ .update(cx, |repository, _| {
+ repository.diff_checkpoints(checkpoint_1.clone(), checkpoint_1.clone())
+ })
+ .await
+ .unwrap()
+ .unwrap();
+ assert!(
+ diff.is_empty(),
+ "diff of identical checkpoints should be empty"
+ );
+
+ // 7. diff_checkpoints: different checkpoints => non-empty diff mentioning the changed file
+ let diff = repository
+ .update(cx, |repository, _| {
+ repository.diff_checkpoints(checkpoint_1.clone(), checkpoint_2.clone())
+ })
+ .await
+ .unwrap()
+ .unwrap();
+ assert!(
+ !diff.is_empty(),
+ "diff of different checkpoints should be non-empty"
+ );
+ assert!(
+ diff.contains("file.txt"),
+ "diff should mention the changed file"
+ );
+ assert!(
+ diff.contains("original content"),
+ "diff should contain removed content"
+ );
+ assert!(
+ diff.contains("modified content"),
+ "diff should contain added content"
+ );
+
+ // 8. restore_checkpoint: restore to original state
+ repository
+ .update(cx, |repository, _| {
+ repository.restore_checkpoint(checkpoint_1.clone())
+ })
+ .await
+ .unwrap()
+ .unwrap();
+ cx.run_until_parked();
+
+ // 9. Create a checkpoint after restore
+ let checkpoint_3 = repository
+ .update(cx, |repository, _| repository.checkpoint())
+ .await
+ .unwrap()
+ .unwrap();
+
+ // 10. compare_checkpoints: restored state matches original
+ let equal = repository
+ .update(cx, |repository, _| {
+ repository.compare_checkpoints(checkpoint_1.clone(), checkpoint_3.clone())
+ })
+ .await
+ .unwrap()
+ .unwrap();
+ assert!(equal, "restored state should match original checkpoint");
+
+ // 11. diff_checkpoints: restored state vs original => empty diff
+ let diff = repository
+ .update(cx, |repository, _| {
+ repository.diff_checkpoints(checkpoint_1.clone(), checkpoint_3.clone())
+ })
+ .await
+ .unwrap()
+ .unwrap();
+ assert!(diff.is_empty(), "diff after restore should be empty");
+}
+
#[gpui::test]
async fn test_remote_agent_fs_tool_calls(cx: &mut TestAppContext, server_cx: &mut TestAppContext) {
let fs = FakeFs::new(server_cx.executor());
@@ -177,6 +177,13 @@ impl PythonEnvKernelSpecification {
kernelspec: self.kernelspec.clone(),
}
}
+
+ pub fn is_uv(&self) -> bool {
+ matches!(
+ self.environment_kind.as_deref(),
+ Some("uv" | "uv (Workspace)")
+ )
+ }
}
#[derive(Debug, Clone, PartialEq, Eq)]
@@ -87,6 +87,7 @@ pub fn install_ipykernel_and_assign(
let python_path = env_spec.path.clone();
let env_name = env_spec.name.clone();
+ let is_uv = env_spec.is_uv();
let env_spec = env_spec.clone();
struct IpykernelInstall;
@@ -109,11 +110,25 @@ pub fn install_ipykernel_and_assign(
let window_handle = window.window_handle();
let install_task = cx.background_spawn(async move {
- let output = util::command::new_command(python_path.to_string_lossy().as_ref())
- .args(&["-m", "pip", "install", "ipykernel"])
- .output()
- .await
- .context("failed to run pip install ipykernel")?;
+ let output = if is_uv {
+ util::command::new_command("uv")
+ .args(&[
+ "pip",
+ "install",
+ "ipykernel",
+ "--python",
+ &python_path.to_string_lossy(),
+ ])
+ .output()
+ .await
+ .context("failed to run uv pip install ipykernel")?
+ } else {
+ util::command::new_command(python_path.to_string_lossy().as_ref())
+ .args(&["-m", "pip", "install", "ipykernel"])
+ .output()
+ .await
+ .context("failed to run pip install ipykernel")?
+ };
if output.status.success() {
anyhow::Ok(())
@@ -146,6 +161,11 @@ pub fn install_ipykernel_and_assign(
window_handle
.update(cx, |_, window, cx| {
+ let store = ReplStore::global(cx);
+ store.update(cx, |store, cx| {
+ store.mark_ipykernel_installed(cx, &env_spec);
+ });
+
let updated_spec =
KernelSpecification::PythonEnv(PythonEnvKernelSpecification {
has_ipykernel: true,
@@ -13,8 +13,8 @@ use settings::{Settings, SettingsStore};
use util::rel_path::RelPath;
use crate::kernels::{
- Kernel, list_remote_kernelspecs, local_kernel_specifications, python_env_kernel_specifications,
- wsl_kernel_specifications,
+ Kernel, PythonEnvKernelSpecification, list_remote_kernelspecs, local_kernel_specifications,
+ python_env_kernel_specifications, wsl_kernel_specifications,
};
use crate::{JupyterSettings, KernelSpecification, Session};
@@ -136,6 +136,23 @@ impl ReplStore {
cx.notify();
}
+ pub fn mark_ipykernel_installed(
+ &mut self,
+ cx: &mut Context<Self>,
+ spec: &PythonEnvKernelSpecification,
+ ) {
+ for specs in self.kernel_specifications_for_worktree.values_mut() {
+ for kernel_spec in specs.iter_mut() {
+ if let KernelSpecification::PythonEnv(env_spec) = kernel_spec {
+ if env_spec == spec {
+ env_spec.has_ipykernel = true;
+ }
+ }
+ }
+ }
+ cx.notify();
+ }
+
pub fn refresh_python_kernelspecs(
&mut self,
worktree_id: WorktreeId,
@@ -225,6 +225,10 @@ impl PickerDelegate for RulePickerDelegate {
}
}
+ fn select_on_hover(&self) -> bool {
+ false
+ }
+
fn placeholder_text(&self, _window: &mut Window, _cx: &mut App) -> Arc<str> {
"Search…".into()
}
@@ -114,81 +114,23 @@ impl Render for BufferSearchBar {
.map(|splittable_editor| {
let editor_ref = splittable_editor.read(cx);
let diff_view_style = editor_ref.diff_view_style();
- let is_split = editor_ref.is_split();
+
+ let is_split_set = diff_view_style == DiffViewStyle::Split;
+ let is_split_active = editor_ref.is_split();
let min_columns =
EditorSettings::get_global(cx).minimum_split_diff_width as u32;
- let mut split_button = IconButton::new("diff-split", IconName::DiffSplit)
- .shape(IconButtonShape::Square)
- .tooltip(Tooltip::element(move |_, cx| {
- let message = if min_columns == 0 {
- SharedString::from("Split")
- } else {
- format!("Split when wider than {} columns", min_columns).into()
- };
-
- v_flex()
- .child(message)
- .child(
- h_flex()
- .gap_0p5()
- .text_ui_sm(cx)
- .text_color(Color::Muted.color(cx))
- .children(render_modifiers(
- &gpui::Modifiers::secondary_key(),
- PlatformStyle::platform(),
- None,
- Some(TextSize::Small.rems(cx).into()),
- false,
- ))
- .child("click to change min width"),
- )
- .into_any()
- }))
- .on_click({
- let splittable_editor = splittable_editor.downgrade();
- move |_, window, cx| {
- if window.modifiers().secondary() {
- window.dispatch_action(
- OpenSettingsAt {
- path: "minimum_split_diff_width".to_string(),
- }
- .boxed_clone(),
- cx,
- );
- } else {
- update_settings_file(
- <dyn Fs>::global(cx),
- cx,
- |settings, _| {
- settings.editor.diff_view_style =
- Some(DiffViewStyle::Split);
- },
- );
- if diff_view_style == DiffViewStyle::Unified {
- splittable_editor
- .update(cx, |editor, cx| {
- editor.toggle_split(&ToggleSplitDiff, window, cx);
- })
- .ok();
- }
- }
- }
- });
-
- if diff_view_style == DiffViewStyle::Split {
- if !is_split {
- split_button = split_button.icon_color(Color::Disabled)
- } else {
- split_button = split_button.toggle_state(true)
- }
- }
+ let split_icon = if is_split_set && !is_split_active {
+ IconName::DiffSplitAuto
+ } else {
+ IconName::DiffSplit
+ };
h_flex()
.gap_1()
.child(
IconButton::new("diff-unified", IconName::DiffUnified)
- .shape(IconButtonShape::Square)
+ .icon_size(IconSize::Small)
.toggle_state(diff_view_style == DiffViewStyle::Unified)
.tooltip(Tooltip::text("Unified"))
.on_click({
@@ -216,7 +158,71 @@ impl Render for BufferSearchBar {
}
}),
)
- .child(split_button)
+ .child(
+ IconButton::new("diff-split", split_icon)
+ .toggle_state(diff_view_style == DiffViewStyle::Split)
+ .icon_size(IconSize::Small)
+ .tooltip(Tooltip::element(move |_, cx| {
+ let message = if is_split_set && !is_split_active {
+ format!("Split when wider than {} columns", min_columns)
+ .into()
+ } else {
+ SharedString::from("Split")
+ };
+
+ v_flex()
+ .child(message)
+ .child(
+ h_flex()
+ .gap_0p5()
+ .text_ui_sm(cx)
+ .text_color(Color::Muted.color(cx))
+ .children(render_modifiers(
+ &gpui::Modifiers::secondary_key(),
+ PlatformStyle::platform(),
+ None,
+ Some(TextSize::Small.rems(cx).into()),
+ false,
+ ))
+ .child("click to change min width"),
+ )
+ .into_any()
+ }))
+ .on_click({
+ let splittable_editor = splittable_editor.downgrade();
+ move |_, window, cx| {
+ if window.modifiers().secondary() {
+ window.dispatch_action(
+ OpenSettingsAt {
+ path: "minimum_split_diff_width".to_string(),
+ }
+ .boxed_clone(),
+ cx,
+ );
+ } else {
+ update_settings_file(
+ <dyn Fs>::global(cx),
+ cx,
+ |settings, _| {
+ settings.editor.diff_view_style =
+ Some(DiffViewStyle::Split);
+ },
+ );
+ if diff_view_style == DiffViewStyle::Unified {
+ splittable_editor
+ .update(cx, |editor, cx| {
+ editor.toggle_split(
+ &ToggleSplitDiff,
+ window,
+ cx,
+ );
+ })
+ .ok();
+ }
+ }
+ }
+ }),
+ )
})
} else {
None
@@ -240,7 +246,7 @@ impl Render for BufferSearchBar {
let collapse_expand_icon_button = |id| {
IconButton::new(id, icon)
- .shape(IconButtonShape::Square)
+ .icon_size(IconSize::Small)
.tooltip(move |_, cx| {
Tooltip::for_action_in(
tooltip_label,
@@ -285,6 +291,7 @@ impl Render for BufferSearchBar {
regex,
replacement,
selection,
+ select_all,
find_in_results,
} = self.supported_options(cx);
@@ -455,14 +462,16 @@ impl Render for BufferSearchBar {
))
});
- el.child(render_action_button(
- "buffer-search-nav-button",
- IconName::SelectAll,
- Default::default(),
- "Select All Matches",
- &SelectAllMatches,
- query_focus,
- ))
+ el.when(select_all, |el| {
+ el.child(render_action_button(
+ "buffer-search-nav-button",
+ IconName::SelectAll,
+ Default::default(),
+ "Select All Matches",
+ &SelectAllMatches,
+ query_focus.clone(),
+ ))
+ })
.child(matches_column)
})
.when(find_in_results, |el| {
@@ -3400,17 +3409,15 @@ mod tests {
assert_eq!(initial_location, ToolbarItemLocation::Secondary);
- let mut events = cx.events(&search_bar);
+ let mut events = cx.events::<ToolbarItemEvent, BufferSearchBar>(&search_bar);
search_bar.update_in(cx, |search_bar, window, cx| {
search_bar.dismiss(&Dismiss, window, cx);
});
assert_eq!(
- events.try_next().unwrap(),
- Some(ToolbarItemEvent::ChangeLocation(
- ToolbarItemLocation::Hidden
- ))
+ events.try_recv().unwrap(),
+ (ToolbarItemEvent::ChangeLocation(ToolbarItemLocation::Hidden))
);
search_bar.update_in(cx, |search_bar, window, cx| {
@@ -3418,10 +3425,8 @@ mod tests {
});
assert_eq!(
- events.try_next().unwrap(),
- Some(ToolbarItemEvent::ChangeLocation(
- ToolbarItemLocation::Secondary
- ))
+ events.try_recv().unwrap(),
+ (ToolbarItemEvent::ChangeLocation(ToolbarItemLocation::Secondary))
);
}
@@ -3436,17 +3441,15 @@ mod tests {
assert_eq!(initial_location, ToolbarItemLocation::PrimaryLeft);
- let mut events = cx.events(&search_bar);
+ let mut events = cx.events::<ToolbarItemEvent, BufferSearchBar>(&search_bar);
search_bar.update_in(cx, |search_bar, window, cx| {
search_bar.dismiss(&Dismiss, window, cx);
});
assert_eq!(
- events.try_next().unwrap(),
- Some(ToolbarItemEvent::ChangeLocation(
- ToolbarItemLocation::PrimaryLeft
- ))
+ events.try_recv().unwrap(),
+ (ToolbarItemEvent::ChangeLocation(ToolbarItemLocation::PrimaryLeft))
);
search_bar.update_in(cx, |search_bar, window, cx| {
@@ -3454,10 +3457,8 @@ mod tests {
});
assert_eq!(
- events.try_next().unwrap(),
- Some(ToolbarItemEvent::ChangeLocation(
- ToolbarItemLocation::PrimaryLeft
- ))
+ events.try_recv().unwrap(),
+ (ToolbarItemEvent::ChangeLocation(ToolbarItemLocation::PrimaryLeft))
);
}
@@ -3476,17 +3477,15 @@ mod tests {
assert_eq!(initial_location, ToolbarItemLocation::Hidden);
- let mut events = cx.events(&search_bar);
+ let mut events = cx.events::<ToolbarItemEvent, BufferSearchBar>(&search_bar);
search_bar.update_in(cx, |search_bar, window, cx| {
search_bar.dismiss(&Dismiss, window, cx);
});
assert_eq!(
- events.try_next().unwrap(),
- Some(ToolbarItemEvent::ChangeLocation(
- ToolbarItemLocation::Hidden
- ))
+ events.try_recv().unwrap(),
+ (ToolbarItemEvent::ChangeLocation(ToolbarItemLocation::Hidden))
);
search_bar.update_in(cx, |search_bar, window, cx| {
@@ -3494,10 +3493,8 @@ mod tests {
});
assert_eq!(
- events.try_next().unwrap(),
- Some(ToolbarItemEvent::ChangeLocation(
- ToolbarItemLocation::Secondary
- ))
+ events.try_recv().unwrap(),
+ (ToolbarItemEvent::ChangeLocation(ToolbarItemLocation::Secondary))
);
}
@@ -3550,7 +3547,16 @@ mod tests {
// Manually unfold one buffer (simulating a chevron click)
let first_buffer_id = editor.read_with(cx, |editor, cx| {
- editor.buffer().read(cx).excerpt_buffer_ids()[0]
+ editor
+ .buffer()
+ .read(cx)
+ .snapshot(cx)
+ .excerpts()
+ .nth(0)
+ .unwrap()
+ .context
+ .start
+ .buffer_id
});
editor.update_in(cx, |editor, _window, cx| {
editor.unfold_buffer(first_buffer_id, cx);
@@ -3564,7 +3570,16 @@ mod tests {
// Manually unfold the second buffer too
let second_buffer_id = editor.read_with(cx, |editor, cx| {
- editor.buffer().read(cx).excerpt_buffer_ids()[1]
+ editor
+ .buffer()
+ .read(cx)
+ .snapshot(cx)
+ .excerpts()
+ .nth(1)
+ .unwrap()
+ .context
+ .start
+ .buffer_id
});
editor.update_in(cx, |editor, _window, cx| {
editor.unfold_buffer(second_buffer_id, cx);
@@ -11,8 +11,8 @@ use crate::{
use anyhow::Context as _;
use collections::HashMap;
use editor::{
- Anchor, Editor, EditorEvent, EditorSettings, ExcerptId, MAX_TAB_TITLE_LEN, MultiBuffer,
- PathKey, SelectionEffects,
+ Anchor, Editor, EditorEvent, EditorSettings, MAX_TAB_TITLE_LEN, MultiBuffer, PathKey,
+ SelectionEffects,
actions::{Backtab, FoldAll, SelectAll, Tab, UnfoldAll},
items::active_match_index,
multibuffer_context_lines,
@@ -342,41 +342,32 @@ impl ProjectSearch {
}
fn remove_deleted_buffers(&mut self, cx: &mut Context<Self>) {
- let (deleted_paths, removed_excerpt_ids) = {
- let excerpts = self.excerpts.read(cx);
- let deleted_paths: Vec<PathKey> = excerpts
- .paths()
- .filter(|path| {
- excerpts.buffer_for_path(path, cx).is_some_and(|buffer| {
- buffer
- .read(cx)
- .file()
- .is_some_and(|file| file.disk_state().is_deleted())
- })
- })
- .cloned()
- .collect();
-
- let removed_excerpt_ids: collections::HashSet<ExcerptId> = deleted_paths
- .iter()
- .flat_map(|path| excerpts.excerpts_for_path(path))
- .collect();
-
- (deleted_paths, removed_excerpt_ids)
- };
+ let deleted_buffer_ids = self
+ .excerpts
+ .read(cx)
+ .all_buffers_iter()
+ .filter(|buffer| {
+ buffer
+ .read(cx)
+ .file()
+ .is_some_and(|file| file.disk_state().is_deleted())
+ })
+ .map(|buffer| buffer.read(cx).remote_id())
+ .collect::<Vec<_>>();
- if deleted_paths.is_empty() {
+ if deleted_buffer_ids.is_empty() {
return;
}
- self.excerpts.update(cx, |excerpts, cx| {
- for path in deleted_paths {
- excerpts.remove_excerpts_for_path(path, cx);
+ let snapshot = self.excerpts.update(cx, |excerpts, cx| {
+ for buffer_id in deleted_buffer_ids {
+ excerpts.remove_excerpts_for_buffer(buffer_id, cx);
}
+ excerpts.snapshot(cx)
});
self.match_ranges
- .retain(|range| !removed_excerpt_ids.contains(&range.start.excerpt_id));
+ .retain(|range| snapshot.anchor_to_buffer_anchor(range.start).is_some());
cx.notify();
}
@@ -2990,7 +2981,13 @@ pub mod tests {
.read(cx)
.buffer()
.read(cx)
- .excerpt_buffer_ids()[0]
+ .snapshot(cx)
+ .excerpts()
+ .next()
+ .unwrap()
+ .context
+ .start
+ .buffer_id
})
.expect("should read buffer ids");
@@ -59,13 +59,13 @@ pub struct ActiveSettingsProfileName(pub String);
impl Global for ActiveSettingsProfileName {}
pub trait UserSettingsContentExt {
- fn for_profile(&self, cx: &App) -> Option<&SettingsContent>;
+ fn for_profile(&self, cx: &App) -> Option<&SettingsProfile>;
fn for_release_channel(&self) -> Option<&SettingsContent>;
fn for_os(&self) -> Option<&SettingsContent>;
}
impl UserSettingsContentExt for UserSettingsContent {
- fn for_profile(&self, cx: &App) -> Option<&SettingsContent> {
+ fn for_profile(&self, cx: &App) -> Option<&SettingsProfile> {
let Some(active_profile) = cx.try_global::<ActiveSettingsProfileName>() else {
return None;
};
@@ -36,8 +36,8 @@ use crate::{
LanguageToSettingsMap, LspSettings, LspSettingsMap, SemanticTokenRules, ThemeName,
UserSettingsContentExt, VsCodeSettings, WorktreeId,
settings_content::{
- ExtensionsSettingsContent, ProjectSettingsContent, RootUserSettings, SettingsContent,
- UserSettingsContent, merge_from::MergeFrom,
+ ExtensionsSettingsContent, ProfileBase, ProjectSettingsContent, RootUserSettings,
+ SettingsContent, UserSettingsContent, merge_from::MergeFrom,
},
};
@@ -1210,10 +1210,19 @@ impl SettingsStore {
merged.merge_from_option(self.extension_settings.as_deref());
merged.merge_from_option(self.global_settings.as_deref());
if let Some(user_settings) = self.user_settings.as_ref() {
- merged.merge_from(&user_settings.content);
- merged.merge_from_option(user_settings.for_release_channel());
- merged.merge_from_option(user_settings.for_os());
- merged.merge_from_option(user_settings.for_profile(cx));
+ let active_profile = user_settings.for_profile(cx);
+ let should_merge_user_settings =
+ active_profile.is_none_or(|profile| profile.base == ProfileBase::User);
+
+ if should_merge_user_settings {
+ merged.merge_from(&user_settings.content);
+ merged.merge_from_option(user_settings.for_release_channel());
+ merged.merge_from_option(user_settings.for_os());
+ }
+
+ if let Some(profile) = active_profile {
+ merged.merge_from(&profile.settings);
+ }
}
merged.merge_from_option(self.server_settings.as_deref());
@@ -1431,9 +1440,7 @@ impl std::fmt::Display for InvalidSettingsError {
| InvalidSettingsError::DefaultSettings { message }
| InvalidSettingsError::Tasks { message, .. }
| InvalidSettingsError::Editorconfig { message, .. }
- | InvalidSettingsError::Debug { message, .. } => {
- write!(f, "{message}")
- }
+ | InvalidSettingsError::Debug { message, .. } => write!(f, "{message}"),
}
}
}
@@ -999,6 +999,7 @@ impl VsCodeSettings {
}
}),
zoomed_padding: None,
+ focus_follows_mouse: None,
}
}
@@ -159,10 +159,10 @@ pub struct AgentSettingsContent {
///
/// Default: "primary_screen"
pub notify_when_agent_waiting: Option<NotifyWhenAgentWaiting>,
- /// Whether to play a sound when the agent has either completed its response, or needs user input.
+ /// When to play a sound when the agent has either completed its response, or needs user input.
///
- /// Default: false
- pub play_sound_when_agent_done: Option<bool>,
+ /// Default: never
+ pub play_sound_when_agent_done: Option<PlaySoundWhenAgentDone>,
/// Whether to display agent edits in single-file editors in addition to the review multibuffer pane.
///
/// Default: true
@@ -209,6 +209,11 @@ pub struct AgentSettingsContent {
///
/// Default: false
pub show_turn_stats: Option<bool>,
+ /// Whether to show the merge conflict indicator in the status bar
+ /// that offers to resolve conflicts using the agent.
+ ///
+ /// Default: true
+ pub show_merge_conflict_indicator: Option<bool>,
/// Per-tool permission rules for granular control over which tool actions
/// require confirmation.
///
@@ -347,6 +352,37 @@ pub enum NotifyWhenAgentWaiting {
Never,
}
+#[derive(
+ Copy,
+ Clone,
+ Default,
+ Debug,
+ Serialize,
+ Deserialize,
+ JsonSchema,
+ MergeFrom,
+ PartialEq,
+ strum::VariantArray,
+ strum::VariantNames,
+)]
+#[serde(rename_all = "snake_case")]
+pub enum PlaySoundWhenAgentDone {
+ #[default]
+ Never,
+ WhenHidden,
+ Always,
+}
+
+impl PlaySoundWhenAgentDone {
+ pub fn should_play(&self, visible: bool) -> bool {
+ match self {
+ PlaySoundWhenAgentDone::Never => false,
+ PlaySoundWhenAgentDone::WhenHidden => !visible,
+ PlaySoundWhenAgentDone::Always => true,
+ }
+ }
+}
+
#[with_fallible_options]
#[derive(Clone, Debug, Serialize, Deserialize, JsonSchema, MergeFrom, PartialEq)]
pub struct LanguageModelSelection {
@@ -278,6 +278,7 @@ pub struct OpenAiCompatibleAvailableModel {
pub max_tokens: u64,
pub max_output_tokens: Option<u64>,
pub max_completion_tokens: Option<u64>,
+ pub reasoning_effort: Option<OpenAiReasoningEffort>,
#[serde(default)]
pub capabilities: OpenAiCompatibleModelCapabilities,
}
@@ -276,6 +276,18 @@ pub struct SemanticTokenRule {
pub font_style: Option<SemanticTokenFontStyle>,
}
+impl SemanticTokenRule {
+ pub fn no_style_defined(&self) -> bool {
+ self.style.is_empty()
+ && self.foreground_color.is_none()
+ && self.background_color.is_none()
+ && self.underline.is_none()
+ && self.strikethrough.is_none()
+ && self.font_weight.is_none()
+ && self.font_style.is_none()
+ }
+}
+
#[derive(Copy, Clone, Debug, Serialize, Deserialize, PartialEq, JsonSchema, MergeFrom)]
#[serde(untagged)]
pub enum SemanticTokenColorOverride {
@@ -265,6 +265,35 @@ settings_overrides! {
pub struct PlatformOverrides { macos, linux, windows }
}
+/// Determines what settings a profile starts from before applying its overrides.
+#[derive(
+ Debug, Clone, Copy, Default, PartialEq, Eq, Serialize, Deserialize, JsonSchema, MergeFrom,
+)]
+#[serde(rename_all = "snake_case")]
+pub enum ProfileBase {
+ /// Apply profile settings on top of the user's current settings.
+ #[default]
+ User,
+ /// Apply profile settings on top of Zed's default settings, ignoring user customizations.
+ Default,
+}
+
+/// A named settings profile that can temporarily override settings.
+#[with_fallible_options]
+#[derive(Debug, Default, PartialEq, Clone, Serialize, Deserialize, JsonSchema, MergeFrom)]
+pub struct SettingsProfile {
+ /// What base settings to start from before applying this profile's overrides.
+ ///
+ /// - `user`: Apply on top of user's settings (default)
+ /// - `default`: Apply on top of Zed's default settings, ignoring user customizations
+ #[serde(default)]
+ pub base: ProfileBase,
+
+ /// The settings overrides for this profile.
+ #[serde(default)]
+ pub settings: Box<SettingsContent>,
+}
+
#[with_fallible_options]
#[derive(Debug, Default, PartialEq, Clone, Serialize, Deserialize, JsonSchema, MergeFrom)]
pub struct UserSettingsContent {
@@ -278,7 +307,7 @@ pub struct UserSettingsContent {
pub platform_overrides: PlatformOverrides,
#[serde(default)]
- pub profiles: IndexMap<String, SettingsContent>,
+ pub profiles: IndexMap<String, SettingsProfile>,
}
pub struct ExtensionsSettingsContent {
@@ -734,6 +763,7 @@ pub struct VimSettingsContent {
pub toggle_relative_line_numbers: Option<bool>,
pub use_system_clipboard: Option<UseSystemClipboard>,
pub use_smartcase_find: Option<bool>,
+ pub use_regex_search: Option<bool>,
/// When enabled, the `:substitute` command replaces all matches in a line
/// by default. The 'g' flag then toggles this behavior.,
pub gdefault: Option<bool>,
@@ -122,6 +122,9 @@ pub struct WorkspaceSettingsContent {
/// What draws window decorations/titlebar, the client application (Zed) or display server
/// Default: client
pub window_decorations: Option<WindowDecorations>,
+ /// Whether the focused panel follows the mouse location
+ /// Default: false
+ pub focus_follows_mouse: Option<FocusFollowsMouse>,
}
#[with_fallible_options]
@@ -928,3 +931,10 @@ impl DocumentSymbols {
self == &Self::On
}
}
+
+#[with_fallible_options]
+#[derive(Copy, Clone, PartialEq, Default, Serialize, Deserialize, JsonSchema, MergeFrom, Debug)]
+pub struct FocusFollowsMouse {
+ pub enabled: Option<bool>,
+ pub debounce_ms: Option<u64>,
+}
@@ -291,7 +291,7 @@ mod tests {
use zed_actions::settings_profile_selector;
async fn init_test(
- profiles_json: serde_json::Value,
+ user_settings_json: serde_json::Value,
cx: &mut TestAppContext,
) -> (Entity<Workspace>, &mut VisualTestContext) {
cx.update(|cx| {
@@ -307,13 +307,8 @@ mod tests {
cx.update(|cx| {
SettingsStore::update_global(cx, |store, cx| {
- let settings_json = json!({
- "buffer_font_size": 10.0,
- "profiles": profiles_json,
- });
-
store
- .set_user_settings(&settings_json.to_string(), cx)
+ .set_user_settings(&user_settings_json.to_string(), cx)
.unwrap();
});
});
@@ -328,7 +323,6 @@ mod tests {
cx.update(|_, cx| {
assert!(!cx.has_global::<ActiveSettingsProfileName>());
- assert_eq!(ThemeSettings::get_global(cx).buffer_font_size(cx), px(10.0));
});
(workspace, cx)
@@ -354,15 +348,22 @@ mod tests {
let classroom_and_streaming_profile_name = "Classroom / Streaming".to_string();
let demo_videos_profile_name = "Demo Videos".to_string();
- let profiles_json = json!({
- classroom_and_streaming_profile_name.clone(): {
- "buffer_font_size": 20.0,
- },
- demo_videos_profile_name.clone(): {
- "buffer_font_size": 15.0
+ let user_settings_json = json!({
+ "buffer_font_size": 10.0,
+ "profiles": {
+ classroom_and_streaming_profile_name.clone(): {
+ "settings": {
+ "buffer_font_size": 20.0,
+ }
+ },
+ demo_videos_profile_name.clone(): {
+ "settings": {
+ "buffer_font_size": 15.0
+ }
+ }
}
});
- let (workspace, cx) = init_test(profiles_json.clone(), cx).await;
+ let (workspace, cx) = init_test(user_settings_json, cx).await;
cx.dispatch_action(settings_profile_selector::Toggle);
let picker = active_settings_profile_picker(&workspace, cx);
@@ -575,24 +576,134 @@ mod tests {
});
}
+ #[gpui::test]
+ async fn test_settings_profile_with_user_base(cx: &mut TestAppContext) {
+ let user_settings_json = json!({
+ "buffer_font_size": 10.0,
+ "profiles": {
+ "Explicit User": {
+ "base": "user",
+ "settings": {
+ "buffer_font_size": 20.0
+ }
+ },
+ "Implicit User": {
+ "settings": {
+ "buffer_font_size": 20.0
+ }
+ }
+ }
+ });
+ let (workspace, cx) = init_test(user_settings_json, cx).await;
+
+ // Select "Explicit User" (index 1) — profile applies on top of user settings.
+ cx.dispatch_action(settings_profile_selector::Toggle);
+ let picker = active_settings_profile_picker(&workspace, cx);
+ cx.dispatch_action(SelectNext);
+
+ picker.read_with(cx, |picker, cx| {
+ assert_eq!(
+ picker.delegate.selected_profile_name.as_deref(),
+ Some("Explicit User")
+ );
+ assert_eq!(ThemeSettings::get_global(cx).buffer_font_size(cx), px(20.0));
+ });
+
+ cx.dispatch_action(Confirm);
+
+ // Select "Implicit User" (index 2) — no base specified, same behavior.
+ cx.dispatch_action(settings_profile_selector::Toggle);
+ let picker = active_settings_profile_picker(&workspace, cx);
+ cx.dispatch_action(SelectNext);
+
+ picker.read_with(cx, |picker, cx| {
+ assert_eq!(
+ picker.delegate.selected_profile_name.as_deref(),
+ Some("Implicit User")
+ );
+ assert_eq!(ThemeSettings::get_global(cx).buffer_font_size(cx), px(20.0));
+ });
+
+ cx.dispatch_action(Confirm);
+ }
+
+ #[gpui::test]
+ async fn test_settings_profile_with_default_base(cx: &mut TestAppContext) {
+ let user_settings_json = json!({
+ "buffer_font_size": 10.0,
+ "profiles": {
+ "Clean Slate": {
+ "base": "default"
+ },
+ "Custom on Defaults": {
+ "base": "default",
+ "settings": {
+ "buffer_font_size": 30.0
+ }
+ }
+ }
+ });
+ let (workspace, cx) = init_test(user_settings_json, cx).await;
+
+ // User has buffer_font_size: 10, factory default is 15.
+ cx.update(|_, cx| {
+ assert_eq!(ThemeSettings::get_global(cx).buffer_font_size(cx), px(10.0));
+ });
+
+ // "Clean Slate" has base: "default" with no settings overrides,
+ // so we get the factory default (15), not the user's value (10).
+ cx.dispatch_action(settings_profile_selector::Toggle);
+ let picker = active_settings_profile_picker(&workspace, cx);
+ cx.dispatch_action(SelectNext);
+
+ picker.read_with(cx, |picker, cx| {
+ assert_eq!(
+ picker.delegate.selected_profile_name.as_deref(),
+ Some("Clean Slate")
+ );
+ assert_eq!(ThemeSettings::get_global(cx).buffer_font_size(cx), px(15.0));
+ });
+
+ // "Custom on Defaults" has base: "default" with buffer_font_size: 30,
+ // so the profile's override (30) applies on top of the factory default,
+ // not on top of the user's value (10).
+ cx.dispatch_action(SelectNext);
+
+ picker.read_with(cx, |picker, cx| {
+ assert_eq!(
+ picker.delegate.selected_profile_name.as_deref(),
+ Some("Custom on Defaults")
+ );
+ assert_eq!(ThemeSettings::get_global(cx).buffer_font_size(cx), px(30.0));
+ });
+
+ cx.dispatch_action(Confirm);
+
+ cx.update(|_, cx| {
+ assert_eq!(ThemeSettings::get_global(cx).buffer_font_size(cx), px(30.0));
+ });
+ }
+
#[gpui::test]
async fn test_settings_profile_selector_is_in_user_configuration_order(
cx: &mut TestAppContext,
) {
// Must be unique names (HashMap)
- let profiles_json = json!({
- "z": {},
- "e": {},
- "d": {},
- " ": {},
- "r": {},
- "u": {},
- "l": {},
- "3": {},
- "s": {},
- "!": {},
+ let user_settings_json = json!({
+ "profiles": {
+ "z": { "settings": {} },
+ "e": { "settings": {} },
+ "d": { "settings": {} },
+ " ": { "settings": {} },
+ "r": { "settings": {} },
+ "u": { "settings": {} },
+ "l": { "settings": {} },
+ "3": { "settings": {} },
+ "s": { "settings": {} },
+ "!": { "settings": {} },
+ }
});
- let (workspace, cx) = init_test(profiles_json.clone(), cx).await;
+ let (workspace, cx) = init_test(user_settings_json, cx).await;
cx.dispatch_action(settings_profile_selector::Toggle);
let picker = active_settings_profile_picker(&workspace, cx);
@@ -59,6 +59,7 @@ ui.workspace = true
util.workspace = true
workspace.workspace = true
zed_actions.workspace = true
+zed_credentials_provider.workspace = true
[dev-dependencies]
fs = { workspace = true, features = ["test-support"] }
@@ -2447,7 +2447,7 @@ fn editor_page() -> SettingsPage {
]
}
- fn vim_settings_section() -> [SettingsPageItem; 12] {
+ fn vim_settings_section() -> [SettingsPageItem; 13] {
[
SettingsPageItem::SectionHeader("Vim"),
SettingsPageItem::SettingItem(SettingItem {
@@ -2556,6 +2556,24 @@ fn editor_page() -> SettingsPage {
metadata: None,
files: USER,
}),
+ SettingsPageItem::SettingItem(SettingItem {
+ title: "Regex Search",
+ description: "Use regex search by default in Vim search.",
+ field: Box::new(SettingField {
+ json_path: Some("vim.use_regex_search"),
+ pick: |settings_content| {
+ settings_content.vim.as_ref()?.use_regex_search.as_ref()
+ },
+ write: |settings_content, value| {
+ settings_content
+ .vim
+ .get_or_insert_default()
+ .use_regex_search = value;
+ },
+ }),
+ metadata: None,
+ files: USER,
+ }),
SettingsPageItem::SettingItem(SettingItem {
title: "Cursor Shape - Normal Mode",
description: "Cursor shape for normal mode.",
@@ -4159,7 +4177,7 @@ fn window_and_layout_page() -> SettingsPage {
]
}
- fn layout_section() -> [SettingsPageItem; 4] {
+ fn layout_section() -> [SettingsPageItem; 6] {
[
SettingsPageItem::SectionHeader("Layout"),
SettingsPageItem::SettingItem(SettingItem {
@@ -4223,6 +4241,52 @@ fn window_and_layout_page() -> SettingsPage {
}),
metadata: None,
}),
+ SettingsPageItem::SettingItem(SettingItem {
+ title: "Focus Follows Mouse",
+ description: "Whether to change focus to a pane when the mouse hovers over it.",
+ field: Box::new(SettingField {
+ json_path: Some("focus_follows_mouse.enabled"),
+ pick: |settings_content| {
+ settings_content
+ .workspace
+ .focus_follows_mouse
+ .as_ref()
+ .and_then(|s| s.enabled.as_ref())
+ },
+ write: |settings_content, value| {
+ settings_content
+ .workspace
+ .focus_follows_mouse
+ .get_or_insert_default()
+ .enabled = value;
+ },
+ }),
+ metadata: None,
+ files: USER,
+ }),
+ SettingsPageItem::SettingItem(SettingItem {
+ title: "Focus Follows Mouse Debounce ms",
+ description: "Amount of time to wait before changing focus.",
+ field: Box::new(SettingField {
+ json_path: Some("focus_follows_mouse.debounce_ms"),
+ pick: |settings_content| {
+ settings_content
+ .workspace
+ .focus_follows_mouse
+ .as_ref()
+ .and_then(|s| s.debounce_ms.as_ref())
+ },
+ write: |settings_content, value| {
+ settings_content
+ .workspace
+ .focus_follows_mouse
+ .get_or_insert_default()
+ .debounce_ms = value;
+ },
+ }),
+ metadata: None,
+ files: USER,
+ }),
]
}
@@ -4387,7 +4451,7 @@ fn window_and_layout_page() -> SettingsPage {
}
fn panels_page() -> SettingsPage {
- fn project_panel_section() -> [SettingsPageItem; 24] {
+ fn project_panel_section() -> [SettingsPageItem; 28] {
[
SettingsPageItem::SectionHeader("Project Panel"),
SettingsPageItem::SettingItem(SettingItem {
@@ -4868,31 +4932,25 @@ fn panels_page() -> SettingsPage {
files: USER,
}),
SettingsPageItem::SettingItem(SettingItem {
- title: "Hidden Files",
- description: "Globs to match files that will be considered \"hidden\" and can be hidden from the project panel.",
- field: Box::new(
- SettingField {
- json_path: Some("worktree.hidden_files"),
- pick: |settings_content| {
- settings_content.project.worktree.hidden_files.as_ref()
- },
- write: |settings_content, value| {
- settings_content.project.worktree.hidden_files = value;
- },
- }
- .unimplemented(),
- ),
+ title: "Sort Mode",
+ description: "Sort order for entries in the project panel.",
+ field: Box::new(SettingField {
+ json_path: Some("project_panel.sort_mode"),
+ pick: |settings_content| {
+ settings_content.project_panel.as_ref()?.sort_mode.as_ref()
+ },
+ write: |settings_content, value| {
+ settings_content
+ .project_panel
+ .get_or_insert_default()
+ .sort_mode = value;
+ },
+ }),
metadata: None,
files: USER,
}),
- ]
- }
-
- fn auto_open_files_section() -> [SettingsPageItem; 5] {
- [
- SettingsPageItem::SectionHeader("Auto Open Files"),
SettingsPageItem::SettingItem(SettingItem {
- title: "On Create",
+ title: "Auto Open Files On Create",
description: "Whether to automatically open newly created files in the editor.",
field: Box::new(SettingField {
json_path: Some("project_panel.auto_open.on_create"),
@@ -4918,7 +4976,7 @@ fn panels_page() -> SettingsPage {
files: USER,
}),
SettingsPageItem::SettingItem(SettingItem {
- title: "On Paste",
+ title: "Auto Open Files On Paste",
description: "Whether to automatically open files after pasting or duplicating them.",
field: Box::new(SettingField {
json_path: Some("project_panel.auto_open.on_paste"),
@@ -4944,7 +5002,7 @@ fn panels_page() -> SettingsPage {
files: USER,
}),
SettingsPageItem::SettingItem(SettingItem {
- title: "On Drop",
+ title: "Auto Open Files On Drop",
description: "Whether to automatically open files dropped from external sources.",
field: Box::new(SettingField {
json_path: Some("project_panel.auto_open.on_drop"),
@@ -4970,27 +5028,27 @@ fn panels_page() -> SettingsPage {
files: USER,
}),
SettingsPageItem::SettingItem(SettingItem {
- title: "Sort Mode",
- description: "Sort order for entries in the project panel.",
- field: Box::new(SettingField {
- pick: |settings_content| {
- settings_content.project_panel.as_ref()?.sort_mode.as_ref()
- },
- write: |settings_content, value| {
- settings_content
- .project_panel
- .get_or_insert_default()
- .sort_mode = value;
- },
- json_path: Some("project_panel.sort_mode"),
- }),
+ title: "Hidden Files",
+ description: "Globs to match files that will be considered \"hidden\" and can be hidden from the project panel.",
+ field: Box::new(
+ SettingField {
+ json_path: Some("worktree.hidden_files"),
+ pick: |settings_content| {
+ settings_content.project.worktree.hidden_files.as_ref()
+ },
+ write: |settings_content, value| {
+ settings_content.project.worktree.hidden_files = value;
+ },
+ }
+ .unimplemented(),
+ ),
metadata: None,
files: USER,
}),
]
}
- fn terminal_panel_section() -> [SettingsPageItem; 3] {
+ fn terminal_panel_section() -> [SettingsPageItem; 4] {
[
SettingsPageItem::SectionHeader("Terminal Panel"),
SettingsPageItem::SettingItem(SettingItem {
@@ -5006,6 +5064,19 @@ fn panels_page() -> SettingsPage {
metadata: None,
files: USER,
}),
+ SettingsPageItem::SettingItem(SettingItem {
+ title: "Terminal Panel Flexible Sizing",
+ description: "Whether the terminal panel should use flexible (proportional) sizing when docked to the left or right.",
+ field: Box::new(SettingField {
+ json_path: Some("terminal.flexible"),
+ pick: |settings_content| settings_content.terminal.as_ref()?.flexible.as_ref(),
+ write: |settings_content, value| {
+ settings_content.terminal.get_or_insert_default().flexible = value;
+ },
+ }),
+ metadata: None,
+ files: USER,
+ }),
SettingsPageItem::SettingItem(SettingItem {
title: "Show Count Badge",
description: "Show a badge on the terminal panel icon with the count of open terminals.",
@@ -5666,7 +5737,7 @@ fn panels_page() -> SettingsPage {
]
}
- fn agent_panel_section() -> [SettingsPageItem; 5] {
+ fn agent_panel_section() -> [SettingsPageItem; 6] {
[
SettingsPageItem::SectionHeader("Agent Panel"),
SettingsPageItem::SettingItem(SettingItem {
@@ -5695,6 +5766,19 @@ fn panels_page() -> SettingsPage {
metadata: None,
files: USER,
}),
+ SettingsPageItem::SettingItem(SettingItem {
+ title: "Agent Panel Flexible Sizing",
+ description: "Whether the agent panel should use flexible (proportional) sizing when docked to the left or right.",
+ field: Box::new(SettingField {
+ json_path: Some("agent.flexible"),
+ pick: |settings_content| settings_content.agent.as_ref()?.flexible.as_ref(),
+ write: |settings_content, value| {
+ settings_content.agent.get_or_insert_default().flexible = value;
+ },
+ }),
+ metadata: None,
+ files: USER,
+ }),
SettingsPageItem::SettingItem(SettingItem {
title: "Agent Panel Default Width",
description: "Default width when the agent panel is docked to the left or right.",
@@ -5735,7 +5819,6 @@ fn panels_page() -> SettingsPage {
title: "Panels",
items: concat_sections![
project_panel_section(),
- auto_open_files_section(),
terminal_panel_section(),
outline_panel_section(),
git_panel_section(),
@@ -7278,7 +7361,7 @@ fn ai_page(cx: &App) -> SettingsPage {
}),
SettingsPageItem::SettingItem(SettingItem {
title: "Play Sound When Agent Done",
- description: "Whether to play a sound when the agent has either completed its response, or needs user input.",
+ description: "When to play a sound when the agent has either completed its response, or needs user input.",
field: Box::new(SettingField {
json_path: Some("agent.play_sound_when_agent_done"),
pick: |settings_content| {
@@ -7444,6 +7527,24 @@ fn ai_page(cx: &App) -> SettingsPage {
metadata: None,
files: USER,
}),
+ SettingsPageItem::SettingItem(SettingItem {
+ title: "Show Merge Conflict Indicator",
+ description: "Whether to show the merge conflict indicator in the status bar that offers to resolve conflicts using the agent.",
+ field: Box::new(SettingField {
+ json_path: Some("agent.show_merge_conflict_indicator"),
+ pick: |settings_content| {
+ settings_content.agent.as_ref()?.show_merge_conflict_indicator.as_ref()
+ },
+ write: |settings_content, value| {
+ settings_content
+ .agent
+ .get_or_insert_default()
+ .show_merge_conflict_indicator = value;
+ },
+ }),
+ metadata: None,
+ files: USER,
+ }),
]);
items.into_boxed_slice()
@@ -185,9 +185,15 @@ fn render_api_key_provider(
cx: &mut Context<SettingsWindow>,
) -> impl IntoElement {
let weak_page = cx.weak_entity();
+ let credentials_provider = zed_credentials_provider::global(cx);
_ = window.use_keyed_state(current_url(cx), cx, |_, cx| {
let task = api_key_state.update(cx, |key_state, cx| {
- key_state.load_if_needed(current_url(cx), |state| state, cx)
+ key_state.load_if_needed(
+ current_url(cx),
+ |state| state,
+ credentials_provider.clone(),
+ cx,
+ )
});
cx.spawn(async move |_, cx| {
task.await.ok();
@@ -208,10 +214,17 @@ fn render_api_key_provider(
});
let write_key = move |api_key: Option<String>, cx: &mut App| {
+ let credentials_provider = zed_credentials_provider::global(cx);
api_key_state
.update(cx, |key_state, cx| {
let url = current_url(cx);
- key_state.store(url, api_key, |key_state| key_state, cx)
+ key_state.store(
+ url,
+ api_key,
+ |key_state| key_state,
+ credentials_provider,
+ cx,
+ )
})
.detach_and_log_err(cx);
};
@@ -500,18 +500,18 @@ fn init_renderers(cx: &mut App) {
.add_basic_renderer::<settings::TerminalBlink>(render_dropdown)
.add_basic_renderer::<settings::CursorShapeContent>(render_dropdown)
.add_basic_renderer::<settings::EditPredictionPromptFormat>(render_dropdown)
- .add_basic_renderer::<f32>(render_number_field)
- .add_basic_renderer::<u32>(render_number_field)
- .add_basic_renderer::<u64>(render_number_field)
- .add_basic_renderer::<usize>(render_number_field)
- .add_basic_renderer::<NonZero<usize>>(render_number_field)
- .add_basic_renderer::<NonZeroU32>(render_number_field)
- .add_basic_renderer::<settings::CodeFade>(render_number_field)
- .add_basic_renderer::<settings::DelayMs>(render_number_field)
- .add_basic_renderer::<settings::FontWeightContent>(render_number_field)
- .add_basic_renderer::<settings::CenteredPaddingSettings>(render_number_field)
- .add_basic_renderer::<settings::InactiveOpacity>(render_number_field)
- .add_basic_renderer::<settings::MinimumContrast>(render_number_field)
+ .add_basic_renderer::<f32>(render_editable_number_field)
+ .add_basic_renderer::<u32>(render_editable_number_field)
+ .add_basic_renderer::<u64>(render_editable_number_field)
+ .add_basic_renderer::<usize>(render_editable_number_field)
+ .add_basic_renderer::<NonZero<usize>>(render_editable_number_field)
+ .add_basic_renderer::<NonZeroU32>(render_editable_number_field)
+ .add_basic_renderer::<settings::CodeFade>(render_editable_number_field)
+ .add_basic_renderer::<settings::DelayMs>(render_editable_number_field)
+ .add_basic_renderer::<settings::FontWeightContent>(render_editable_number_field)
+ .add_basic_renderer::<settings::CenteredPaddingSettings>(render_editable_number_field)
+ .add_basic_renderer::<settings::InactiveOpacity>(render_editable_number_field)
+ .add_basic_renderer::<settings::MinimumContrast>(render_editable_number_field)
.add_basic_renderer::<settings::ShowScrollbar>(render_dropdown)
.add_basic_renderer::<settings::ScrollbarDiagnostics>(render_dropdown)
.add_basic_renderer::<settings::ShowMinimap>(render_dropdown)
@@ -523,6 +523,7 @@ fn init_renderers(cx: &mut App) {
.add_basic_renderer::<settings::VimInsertModeCursorShape>(render_dropdown)
.add_basic_renderer::<settings::SteppingGranularity>(render_dropdown)
.add_basic_renderer::<settings::NotifyWhenAgentWaiting>(render_dropdown)
+ .add_basic_renderer::<settings::PlaySoundWhenAgentDone>(render_dropdown)
.add_basic_renderer::<settings::NewThreadLocation>(render_dropdown)
.add_basic_renderer::<settings::ThinkingBlockDisplay>(render_dropdown)
.add_basic_renderer::<settings::ImageFileSizeUnit>(render_dropdown)
@@ -3752,7 +3753,6 @@ fn all_projects(
.flat_map(|multi_workspace| {
multi_workspace
.workspaces()
- .iter()
.map(|workspace| workspace.read(cx).project().clone())
.collect::<Vec<_>>()
}),
@@ -4050,41 +4050,6 @@ fn render_toggle_button<B: Into<bool> + From<bool> + Copy>(
.into_any_element()
}
-fn render_number_field<T: NumberFieldType + Send + Sync>(
- field: SettingField<T>,
- file: SettingsUiFile,
- _metadata: Option<&SettingsFieldMetadata>,
- window: &mut Window,
- cx: &mut App,
-) -> AnyElement {
- let (_, value) = SettingsStore::global(cx).get_value_from_file(file.to_settings(), field.pick);
- let value = value.copied().unwrap_or_else(T::min_value);
-
- let id = field
- .json_path
- .map(|p| format!("numeric_stepper_{}", p))
- .unwrap_or_else(|| "numeric_stepper".to_string());
-
- NumberField::new(id, value, window, cx)
- .tab_index(0_isize)
- .on_change({
- move |value, window, cx| {
- let value = *value;
- update_settings_file(
- file.clone(),
- field.json_path,
- window,
- cx,
- move |settings, _cx| {
- (field.write)(settings, Some(value));
- },
- )
- .log_err(); // todo(settings_ui) don't log err
- }
- })
- .into_any_element()
-}
-
fn render_editable_number_field<T: NumberFieldType + Send + Sync>(
field: SettingField<T>,
file: SettingsUiFile,
@@ -23,7 +23,6 @@ agent_settings.workspace = true
agent_ui = { workspace = true, features = ["audio"] }
anyhow.workspace = true
chrono.workspace = true
-collections.workspace = true
editor.workspace = true
feature_flags.workspace = true
fs.workspace = true
@@ -1,420 +0,0 @@
-//! The sidebar groups threads by a canonical path list.
-//!
-//! Threads have a path list associated with them, but this is the absolute path
-//! of whatever worktrees they were associated with. In the sidebar, we want to
-//! group all threads by their main worktree, and then we add a worktree chip to
-//! the sidebar entry when that thread is in another worktree.
-//!
-//! This module is provides the functions and structures necessary to do this
-//! lookup and mapping.
-
-use collections::{HashMap, HashSet, vecmap::VecMap};
-use std::{
- path::{Component, Path, PathBuf},
- sync::Arc,
-};
-
-use gpui::{App, Entity};
-use ui::SharedString;
-use workspace::{MultiWorkspace, PathList, Workspace};
-
-/// Identifies a project group by a set of paths the workspaces in this group
-/// have.
-///
-/// Paths are mapped to their main worktree path first so we can group
-/// workspaces by main repos.
-#[derive(PartialEq, Eq, Hash, Clone)]
-pub struct ProjectGroupName {
- path_list: PathList,
-}
-
-pub(crate) fn path_suffix(path: &Path, detail: usize) -> String {
- let components: Vec<_> = path
- .components()
- .filter_map(|c| match c {
- Component::Normal(s) => Some(s.to_string_lossy()),
- _ => None,
- })
- .collect();
- let start = components.len().saturating_sub(detail + 1);
- components[start..].join("/")
-}
-
-impl ProjectGroupName {
- pub fn display_name_from_suffixes(
- &self,
- path_detail_map: &HashMap<PathBuf, usize>,
- ) -> SharedString {
- let mut names = Vec::with_capacity(self.path_list.paths().len());
- for abs_path in self.path_list.paths() {
- let detail = path_detail_map.get(abs_path).copied().unwrap_or(0);
- let suffix = path_suffix(abs_path, detail);
- if !suffix.is_empty() {
- names.push(suffix);
- }
- }
- if names.is_empty() {
- "Empty Workspace".into()
- } else {
- names.join(", ").into()
- }
- }
-
- pub fn path_list(&self) -> &PathList {
- &self.path_list
- }
-}
-
-#[derive(Default)]
-pub struct ProjectGroup {
- pub workspaces: Vec<Entity<Workspace>>,
- /// Root paths of all open workspaces in this group. Used to skip
- /// redundant thread-store queries for linked worktrees that already
- /// have an open workspace.
- covered_paths: HashSet<Arc<Path>>,
-}
-
-impl ProjectGroup {
- fn add_workspace(&mut self, workspace: &Entity<Workspace>, cx: &App) {
- if !self.workspaces.contains(workspace) {
- self.workspaces.push(workspace.clone());
- }
- for path in workspace.read(cx).root_paths(cx) {
- self.covered_paths.insert(path);
- }
- }
-
- pub fn first_workspace(&self) -> &Entity<Workspace> {
- self.workspaces
- .first()
- .expect("groups always have at least one workspace")
- }
-
- pub fn main_workspace(&self, cx: &App) -> &Entity<Workspace> {
- self.workspaces
- .iter()
- .find(|ws| {
- !crate::root_repository_snapshots(ws, cx)
- .any(|snapshot| snapshot.is_linked_worktree())
- })
- .unwrap_or_else(|| self.first_workspace())
- }
-}
-
-pub struct ProjectGroupBuilder {
- /// Maps git repositories' work_directory_abs_path to their original_repo_abs_path
- directory_mappings: HashMap<PathBuf, PathBuf>,
- project_groups: VecMap<ProjectGroupName, ProjectGroup>,
-}
-
-impl ProjectGroupBuilder {
- fn new() -> Self {
- Self {
- directory_mappings: HashMap::default(),
- project_groups: VecMap::new(),
- }
- }
-
- pub fn from_multiworkspace(mw: &MultiWorkspace, cx: &App) -> Self {
- let mut builder = Self::new();
- // First pass: collect all directory mappings from every workspace
- // so we know how to canonicalize any path (including linked
- // worktree paths discovered by the main repo's workspace).
- for workspace in mw.workspaces() {
- builder.add_workspace_mappings(workspace.read(cx), cx);
- }
-
- // Second pass: group each workspace using canonical paths derived
- // from the full set of mappings.
- for workspace in mw.workspaces() {
- let group_name = builder.canonical_workspace_paths(workspace, cx);
- builder
- .project_group_entry(&group_name)
- .add_workspace(workspace, cx);
- }
- builder
- }
-
- fn project_group_entry(&mut self, name: &ProjectGroupName) -> &mut ProjectGroup {
- self.project_groups.entry_ref(name).or_insert_default()
- }
-
- fn add_mapping(&mut self, work_directory: &Path, original_repo: &Path) {
- let old = self
- .directory_mappings
- .insert(PathBuf::from(work_directory), PathBuf::from(original_repo));
- if let Some(old) = old {
- debug_assert_eq!(
- &old, original_repo,
- "all worktrees should map to the same main worktree"
- );
- }
- }
-
- pub fn add_workspace_mappings(&mut self, workspace: &Workspace, cx: &App) {
- for repo in workspace.project().read(cx).repositories(cx).values() {
- let snapshot = repo.read(cx).snapshot();
-
- self.add_mapping(
- &snapshot.work_directory_abs_path,
- &snapshot.original_repo_abs_path,
- );
-
- for worktree in snapshot.linked_worktrees.iter() {
- self.add_mapping(&worktree.path, &snapshot.original_repo_abs_path);
- }
- }
- }
-
- /// Derives the canonical group name for a workspace by canonicalizing
- /// each of its root paths using the builder's directory mappings.
- fn canonical_workspace_paths(
- &self,
- workspace: &Entity<Workspace>,
- cx: &App,
- ) -> ProjectGroupName {
- let root_paths = workspace.read(cx).root_paths(cx);
- let paths: Vec<_> = root_paths
- .iter()
- .map(|p| self.canonicalize_path(p).to_path_buf())
- .collect();
- ProjectGroupName {
- path_list: PathList::new(&paths),
- }
- }
-
- pub fn canonicalize_path<'a>(&'a self, path: &'a Path) -> &'a Path {
- self.directory_mappings
- .get(path)
- .map(AsRef::as_ref)
- .unwrap_or(path)
- }
-
- /// Whether the given group should load threads for a linked worktree
- /// at `worktree_path`. Returns `false` if the worktree already has an
- /// open workspace in the group (its threads are loaded via the
- /// workspace loop) or if the worktree's canonical path list doesn't
- /// match `group_path_list`.
- pub fn group_owns_worktree(
- &self,
- group: &ProjectGroup,
- group_path_list: &PathList,
- worktree_path: &Path,
- ) -> bool {
- if group.covered_paths.contains(worktree_path) {
- return false;
- }
- let canonical = self.canonicalize_path_list(&PathList::new(&[worktree_path]));
- canonical == *group_path_list
- }
-
- /// Canonicalizes every path in a [`PathList`] using the builder's
- /// directory mappings.
- fn canonicalize_path_list(&self, path_list: &PathList) -> PathList {
- let paths: Vec<_> = path_list
- .paths()
- .iter()
- .map(|p| self.canonicalize_path(p).to_path_buf())
- .collect();
- PathList::new(&paths)
- }
-
- pub fn groups(&self) -> impl Iterator<Item = (&ProjectGroupName, &ProjectGroup)> {
- self.project_groups.iter()
- }
-}
-
-#[cfg(test)]
-mod tests {
- use std::sync::Arc;
-
- use super::*;
- use fs::FakeFs;
- use gpui::TestAppContext;
- use settings::SettingsStore;
-
- fn init_test(cx: &mut TestAppContext) {
- cx.update(|cx| {
- let settings_store = SettingsStore::test(cx);
- cx.set_global(settings_store);
- theme_settings::init(theme::LoadThemes::JustBase, cx);
- });
- }
-
- async fn create_fs_with_main_and_worktree(cx: &mut TestAppContext) -> Arc<FakeFs> {
- let fs = FakeFs::new(cx.executor());
- fs.insert_tree(
- "/project",
- serde_json::json!({
- ".git": {
- "worktrees": {
- "feature-a": {
- "commondir": "../../",
- "HEAD": "ref: refs/heads/feature-a",
- },
- },
- },
- "src": {},
- }),
- )
- .await;
- fs.insert_tree(
- "/wt/feature-a",
- serde_json::json!({
- ".git": "gitdir: /project/.git/worktrees/feature-a",
- "src": {},
- }),
- )
- .await;
- fs.with_git_state(std::path::Path::new("/project/.git"), false, |state| {
- state.worktrees.push(git::repository::Worktree {
- path: std::path::PathBuf::from("/wt/feature-a"),
- ref_name: Some("refs/heads/feature-a".into()),
- sha: "abc".into(),
- is_main: false,
- });
- })
- .expect("git state should be set");
- fs
- }
-
- #[gpui::test]
- async fn test_main_repo_maps_to_itself(cx: &mut TestAppContext) {
- init_test(cx);
- let fs = create_fs_with_main_and_worktree(cx).await;
- cx.update(|cx| <dyn fs::Fs>::set_global(fs.clone(), cx));
-
- let project = project::Project::test(fs.clone(), ["/project".as_ref()], cx).await;
- project
- .update(cx, |project, cx| project.git_scans_complete(cx))
- .await;
-
- let (multi_workspace, cx) = cx.add_window_view(|window, cx| {
- workspace::MultiWorkspace::test_new(project.clone(), window, cx)
- });
-
- multi_workspace.read_with(cx, |mw, cx| {
- let mut canonicalizer = ProjectGroupBuilder::new();
- for workspace in mw.workspaces() {
- canonicalizer.add_workspace_mappings(workspace.read(cx), cx);
- }
-
- // The main repo path should canonicalize to itself.
- assert_eq!(
- canonicalizer.canonicalize_path(Path::new("/project")),
- Path::new("/project"),
- );
-
- // An unknown path returns None.
- assert_eq!(
- canonicalizer.canonicalize_path(Path::new("/something/else")),
- Path::new("/something/else"),
- );
- });
- }
-
- fn group_name_from_paths(paths: &[&str]) -> ProjectGroupName {
- ProjectGroupName {
- path_list: PathList::new(paths),
- }
- }
-
- #[test]
- fn test_path_suffix_detail_zero() {
- assert_eq!(path_suffix(Path::new("/a/b/c"), 0), "c");
- }
-
- #[test]
- fn test_path_suffix_detail_one() {
- assert_eq!(path_suffix(Path::new("/a/b/c"), 1), "b/c");
- }
-
- #[test]
- fn test_path_suffix_detail_two() {
- assert_eq!(path_suffix(Path::new("/a/b/c"), 2), "a/b/c");
- }
-
- #[test]
- fn test_path_suffix_clamped() {
- let result = path_suffix(Path::new("/a/b"), 5);
- assert_eq!(result, "a/b");
- }
-
- #[test]
- fn test_display_name_from_suffixes_single_path() {
- let name = group_name_from_paths(&["/code/zed"]);
- let map = HashMap::default();
- assert_eq!(name.display_name_from_suffixes(&map).as_ref(), "zed");
-
- let map = HashMap::from_iter([(PathBuf::from("/code/zed"), 1)]);
- assert_eq!(name.display_name_from_suffixes(&map).as_ref(), "code/zed");
- }
-
- #[test]
- fn test_display_name_from_suffixes_multiple_paths() {
- let name = group_name_from_paths(&["/a/zed", "/b/bar"]);
-
- let map = HashMap::default();
- assert_eq!(
- name.display_name_from_suffixes(&map).as_ref(),
- "zed, bar",
- "PathList sorts lexicographically, so /a/zed comes before /b/bar"
- );
-
- let map = HashMap::from_iter([(PathBuf::from("/a/zed"), 1), (PathBuf::from("/b/bar"), 0)]);
- assert_eq!(name.display_name_from_suffixes(&map).as_ref(), "a/zed, bar");
- }
-
- #[test]
- fn test_display_name_from_suffixes_empty() {
- let name = group_name_from_paths(&[]);
- let map = HashMap::default();
- assert_eq!(
- name.display_name_from_suffixes(&map).as_ref(),
- "Empty Workspace"
- );
- }
-
- #[test]
- fn test_display_name_from_suffixes_per_path_detail() {
- let name = group_name_from_paths(&["/code/zed", "/code/bar/zed"]);
- let map = HashMap::from_iter([
- (PathBuf::from("/code/zed"), 1),
- (PathBuf::from("/code/bar/zed"), 1),
- ]);
- assert_eq!(
- name.display_name_from_suffixes(&map).as_ref(),
- "bar/zed, code/zed",
- );
- }
-
- #[gpui::test]
- async fn test_worktree_checkout_canonicalizes_to_main_repo(cx: &mut TestAppContext) {
- init_test(cx);
- let fs = create_fs_with_main_and_worktree(cx).await;
- cx.update(|cx| <dyn fs::Fs>::set_global(fs.clone(), cx));
-
- // Open the worktree checkout as its own project.
- let project = project::Project::test(fs.clone(), ["/wt/feature-a".as_ref()], cx).await;
- project
- .update(cx, |project, cx| project.git_scans_complete(cx))
- .await;
-
- let (multi_workspace, cx) = cx.add_window_view(|window, cx| {
- workspace::MultiWorkspace::test_new(project.clone(), window, cx)
- });
-
- multi_workspace.read_with(cx, |mw, cx| {
- let mut canonicalizer = ProjectGroupBuilder::new();
- for workspace in mw.workspaces() {
- canonicalizer.add_workspace_mappings(workspace.read(cx), cx);
- }
-
- // The worktree checkout path should canonicalize to the main repo.
- assert_eq!(
- canonicalizer.canonicalize_path(Path::new("/wt/feature-a")),
- Path::new("/project"),
- );
- });
- }
-}
@@ -23,7 +23,9 @@ use gpui::{
use menu::{
Cancel, Confirm, SelectChild, SelectFirst, SelectLast, SelectNext, SelectParent, SelectPrevious,
};
-use project::{AgentId, AgentRegistryStore, Event as ProjectEvent, linked_worktree_short_name};
+use project::{
+ AgentId, AgentRegistryStore, Event as ProjectEvent, ProjectGroupKey, linked_worktree_short_name,
+};
use recent_projects::sidebar_recent_projects::SidebarRecentProjects;
use remote::RemoteConnectionOptions;
use ui::utils::platform_title_bar_height;
@@ -54,10 +56,6 @@ use zed_actions::agents_sidebar::{FocusSidebarFilter, ToggleThreadSwitcher};
use crate::thread_switcher::{ThreadSwitcher, ThreadSwitcherEntry, ThreadSwitcherEvent};
-use crate::project_group_builder::ProjectGroupBuilder;
-
-mod project_group_builder;
-
#[cfg(test)]
mod sidebar_tests;
@@ -136,13 +134,7 @@ impl ActiveEntry {
(ActiveEntry::Thread { session_id, .. }, ListEntry::Thread(thread)) => {
thread.metadata.session_id == *session_id
}
- (
- ActiveEntry::Draft(workspace),
- ListEntry::NewThread {
- workspace: entry_workspace,
- ..
- },
- ) => workspace == entry_workspace,
+ (ActiveEntry::Draft(_workspace), ListEntry::DraftThread { .. }) => true,
_ => false,
}
}
@@ -209,9 +201,8 @@ impl ThreadEntry {
#[derive(Clone)]
enum ListEntry {
ProjectHeader {
- path_list: PathList,
+ key: ProjectGroupKey,
label: SharedString,
- workspace: Entity<Workspace>,
highlight_positions: Vec<usize>,
has_running_threads: bool,
waiting_thread_count: usize,
@@ -219,30 +210,25 @@ enum ListEntry {
},
Thread(ThreadEntry),
ViewMore {
- path_list: PathList,
+ key: ProjectGroupKey,
is_fully_expanded: bool,
},
+ /// The user's active draft thread. Shows a prefix of the currently-typed
+ /// prompt, or "Untitled Thread" if the prompt is empty.
+ DraftThread {
+ worktrees: Vec<WorktreeInfo>,
+ },
+ /// A convenience row for starting a new thread. Shown when a project group
+ /// has no threads, or when the active workspace contains linked worktrees
+ /// with no threads for that specific worktree set.
NewThread {
- path_list: PathList,
- workspace: Entity<Workspace>,
+ key: project::ProjectGroupKey,
worktrees: Vec<WorktreeInfo>,
},
}
#[cfg(test)]
impl ListEntry {
- fn workspace(&self) -> Option<Entity<Workspace>> {
- match self {
- ListEntry::ProjectHeader { workspace, .. } => Some(workspace.clone()),
- ListEntry::Thread(thread_entry) => match &thread_entry.workspace {
- ThreadEntryWorkspace::Open(workspace) => Some(workspace.clone()),
- ThreadEntryWorkspace::Closed(_) => None,
- },
- ListEntry::ViewMore { .. } => None,
- ListEntry::NewThread { workspace, .. } => Some(workspace.clone()),
- }
- }
-
fn session_id(&self) -> Option<&acp::SessionId> {
match self {
ListEntry::Thread(thread_entry) => Some(&thread_entry.metadata.session_id),
@@ -321,27 +307,32 @@ fn workspace_path_list(workspace: &Entity<Workspace>, cx: &App) -> PathList {
/// Derives worktree display info from a thread's stored path list.
///
-/// For each path in the thread's `folder_paths` that canonicalizes to a
-/// different path (i.e. it's a git worktree), produces a [`WorktreeInfo`]
-/// with the short worktree name and full path.
+/// For each path in the thread's `folder_paths` that is not one of the
+/// group's main paths (i.e. it's a git linked worktree), produces a
+/// [`WorktreeInfo`] with the short worktree name and full path.
fn worktree_info_from_thread_paths(
folder_paths: &PathList,
- project_groups: &ProjectGroupBuilder,
+ group_key: &project::ProjectGroupKey,
) -> Vec<WorktreeInfo> {
+ let main_paths = group_key.path_list().paths();
folder_paths
.paths()
.iter()
.filter_map(|path| {
- let canonical = project_groups.canonicalize_path(path);
- if canonical != path.as_path() {
- Some(WorktreeInfo {
- name: linked_worktree_short_name(canonical, path).unwrap_or_default(),
- full_path: SharedString::from(path.display().to_string()),
- highlight_positions: Vec::new(),
- })
- } else {
- None
+ if main_paths.iter().any(|mp| mp.as_path() == path.as_path()) {
+ return None;
}
+ // Find the main path whose file name matches this linked
+ // worktree's file name, falling back to the first main path.
+ let main_path = main_paths
+ .iter()
+ .find(|mp| mp.file_name() == path.file_name())
+ .or(main_paths.first())?;
+ Some(WorktreeInfo {
+ name: linked_worktree_short_name(main_path, path).unwrap_or_default(),
+ full_path: SharedString::from(path.display().to_string()),
+ highlight_positions: Vec::new(),
+ })
})
.collect()
}
@@ -443,7 +434,7 @@ impl Sidebar {
})
.detach();
- let workspaces = multi_workspace.read(cx).workspaces().to_vec();
+ let workspaces: Vec<_> = multi_workspace.read(cx).workspaces().cloned().collect();
cx.defer_in(window, move |this, window, cx| {
for workspace in &workspaces {
this.subscribe_to_workspace(workspace, window, cx);
@@ -677,10 +668,37 @@ impl Sidebar {
result
}
+ /// Finds an open workspace whose project group key matches the given path list.
+ fn workspace_for_group(&self, path_list: &PathList, cx: &App) -> Option<Entity<Workspace>> {
+ let mw = self.multi_workspace.upgrade()?;
+ let mw = mw.read(cx);
+ mw.workspaces()
+ .find(|ws| ws.read(cx).project_group_key(cx).path_list() == path_list)
+ .cloned()
+ }
+
+ /// Opens a new workspace for a group that has no open workspaces.
+ fn open_workspace_for_group(
+ &mut self,
+ path_list: &PathList,
+ window: &mut Window,
+ cx: &mut Context<Self>,
+ ) {
+ let Some(multi_workspace) = self.multi_workspace.upgrade() else {
+ return;
+ };
+
+ multi_workspace
+ .update(cx, |this, cx| {
+ this.find_or_create_local_workspace(path_list.clone(), window, cx)
+ })
+ .detach_and_log_err(cx);
+ }
+
/// Rebuilds the sidebar contents from current workspace and thread state.
///
- /// Uses [`ProjectGroupBuilder`] to group workspaces by their main git
- /// repository, then populates thread entries from the metadata store and
+ /// Iterates [`MultiWorkspace::project_group_keys`] to determine project
+ /// groups, then populates thread entries from the metadata store and
/// merges live thread info from active agent panels.
///
/// Aim for a single forward pass over workspaces and threads plus an
@@ -697,8 +715,8 @@ impl Sidebar {
return;
};
let mw = multi_workspace.read(cx);
- let workspaces = mw.workspaces().to_vec();
- let active_workspace = mw.workspaces().get(mw.active_workspace_index()).cloned();
+ let workspaces: Vec<_> = mw.workspaces().cloned().collect();
+ let active_workspace = Some(mw.workspace().clone());
let agent_server_store = workspaces
.first()
@@ -709,19 +727,25 @@ impl Sidebar {
// Derive active_entry from the active workspace's agent panel.
// Draft is checked first because a conversation can have a session_id
// before any messages are sent. However, a thread that's still loading
- // also appears as a "draft" (no messages yet), so when we already have
- // an eager Thread write for this workspace we preserve it. A session_id
- // on a non-draft is a positive Thread signal. The remaining case
- // (conversation exists, not draft, no session_id) is a genuine
- // mid-load — keep the previous value.
+ // also appears as a "draft" (no messages yet).
if let Some(active_ws) = &active_workspace {
if let Some(panel) = active_ws.read(cx).panel::<AgentPanel>(cx) {
if panel.read(cx).active_thread_is_draft(cx)
|| panel.read(cx).active_conversation_view().is_none()
{
+ let conversation_parent_id = panel
+ .read(cx)
+ .active_conversation_view()
+ .and_then(|cv| cv.read(cx).parent_id(cx));
let preserving_thread =
- matches!(&self.active_entry, Some(ActiveEntry::Thread { .. }))
- && self.active_entry_workspace() == Some(active_ws);
+ if let Some(ActiveEntry::Thread { session_id, .. }) = &self.active_entry {
+ self.active_entry_workspace() == Some(active_ws)
+ && conversation_parent_id
+ .as_ref()
+ .is_some_and(|id| id == session_id)
+ } else {
+ false
+ };
if !preserving_thread {
self.active_entry = Some(ActiveEntry::Draft(active_ws.clone()));
}
@@ -758,11 +782,6 @@ impl Sidebar {
let mut current_session_ids: HashSet<acp::SessionId> = HashSet::new();
let mut project_header_indices: Vec<usize> = Vec::new();
- // Use ProjectGroupBuilder to canonically group workspaces by their
- // main git repository. This replaces the manual absorbed-workspace
- // detection that was here before.
- let project_groups = ProjectGroupBuilder::from_multiworkspace(mw, cx);
-
let has_open_projects = workspaces
.iter()
.any(|ws| !workspace_path_list(ws, cx).paths().is_empty());
@@ -779,51 +798,41 @@ impl Sidebar {
(icon, icon_from_external_svg)
};
- let groups: Vec<_> = project_groups.groups().collect();
+ let groups: Vec<_> = mw.project_groups(cx).collect();
let all_paths: Vec<std::path::PathBuf> = groups
.iter()
- .flat_map(|(name, _)| name.path_list().paths().iter().cloned())
+ .flat_map(|(key, _)| key.path_list().paths().iter().cloned())
.collect();
let path_details =
util::disambiguate::compute_disambiguation_details(&all_paths, |path, detail| {
- crate::project_group_builder::path_suffix(path, detail)
+ project::path_suffix(path, detail)
});
- let path_detail_map: collections::HashMap<std::path::PathBuf, usize> =
+ let path_detail_map: HashMap<std::path::PathBuf, usize> =
all_paths.into_iter().zip(path_details).collect();
- for (group_name, group) in &groups {
- let path_list = group_name.path_list().clone();
+ for (group_key, group_workspaces) in &groups {
+ let path_list = group_key.path_list().clone();
if path_list.paths().is_empty() {
continue;
}
- let label = group_name.display_name_from_suffixes(&path_detail_map);
+ let label = group_key.display_name_from_suffixes(&path_detail_map);
let is_collapsed = self.collapsed_groups.contains(&path_list);
let should_load_threads = !is_collapsed || !query.is_empty();
let is_active = active_workspace
.as_ref()
- .is_some_and(|active| group.workspaces.contains(active));
-
- // Pick a representative workspace for the group: prefer the active
- // workspace if it belongs to this group, otherwise use the main
- // repo workspace (not a linked worktree).
- let representative_workspace = active_workspace
- .as_ref()
- .filter(|_| is_active)
- .unwrap_or_else(|| group.main_workspace(cx));
+ .is_some_and(|active| group_workspaces.contains(active));
// Collect live thread infos from all workspaces in this group.
- let live_infos: Vec<_> = group
- .workspaces
+ let live_infos: Vec<_> = group_workspaces
.iter()
.flat_map(|ws| all_thread_infos_for_workspace(ws, cx))
.collect();
let mut threads: Vec<ThreadEntry> = Vec::new();
- let mut threadless_workspaces: Vec<(Entity<Workspace>, Vec<WorktreeInfo>)> = Vec::new();
let mut has_running_threads = false;
let mut waiting_thread_count: usize = 0;
@@ -831,61 +840,88 @@ impl Sidebar {
let mut seen_session_ids: HashSet<acp::SessionId> = HashSet::new();
let thread_store = ThreadMetadataStore::global(cx);
- // Load threads from each workspace in the group.
- for workspace in &group.workspaces {
- let ws_path_list = workspace_path_list(workspace, cx);
- let mut workspace_rows = thread_store
- .read(cx)
- .entries_for_path(&ws_path_list)
- .cloned()
- .peekable();
- if workspace_rows.peek().is_none() {
- let worktrees =
- worktree_info_from_thread_paths(&ws_path_list, &project_groups);
- threadless_workspaces.push((workspace.clone(), worktrees));
+ // Build a lookup from workspace root paths to their workspace
+ // entity, used to assign ThreadEntryWorkspace::Open for threads
+ // whose folder_paths match an open workspace.
+ let workspace_by_path_list: HashMap<PathList, &Entity<Workspace>> =
+ group_workspaces
+ .iter()
+ .map(|ws| (workspace_path_list(ws, cx), ws))
+ .collect();
+
+ // Resolve a ThreadEntryWorkspace for a thread row. If any open
+ // workspace's root paths match the thread's folder_paths, use
+ // Open; otherwise use Closed.
+ let resolve_workspace = |row: &ThreadMetadata| -> ThreadEntryWorkspace {
+ workspace_by_path_list
+ .get(&row.folder_paths)
+ .map(|ws| ThreadEntryWorkspace::Open((*ws).clone()))
+ .unwrap_or_else(|| ThreadEntryWorkspace::Closed(row.folder_paths.clone()))
+ };
+
+ // Build a ThreadEntry from a metadata row.
+ let make_thread_entry = |row: ThreadMetadata,
+ workspace: ThreadEntryWorkspace|
+ -> ThreadEntry {
+ let (icon, icon_from_external_svg) = resolve_agent_icon(&row.agent_id);
+ let worktrees = worktree_info_from_thread_paths(&row.folder_paths, &group_key);
+ ThreadEntry {
+ metadata: row,
+ icon,
+ icon_from_external_svg,
+ status: AgentThreadStatus::default(),
+ workspace,
+ is_live: false,
+ is_background: false,
+ is_title_generating: false,
+ highlight_positions: Vec::new(),
+ worktrees,
+ diff_stats: DiffStats::default(),
}
- for row in workspace_rows {
- if !seen_session_ids.insert(row.session_id.clone()) {
- continue;
- }
- let (icon, icon_from_external_svg) = resolve_agent_icon(&row.agent_id);
- let worktrees =
- worktree_info_from_thread_paths(&row.folder_paths, &project_groups);
- threads.push(ThreadEntry {
- metadata: row,
- icon,
- icon_from_external_svg,
- status: AgentThreadStatus::default(),
- workspace: ThreadEntryWorkspace::Open(workspace.clone()),
- is_live: false,
- is_background: false,
- is_title_generating: false,
- highlight_positions: Vec::new(),
- worktrees,
- diff_stats: DiffStats::default(),
- });
+ };
+
+ // === Main code path: one query per group via main_worktree_paths ===
+ // The main_worktree_paths column is set on all new threads and
+ // points to the group's canonical paths regardless of which
+ // linked worktree the thread was opened in.
+ for row in thread_store
+ .read(cx)
+ .entries_for_main_worktree_path(&path_list)
+ .cloned()
+ {
+ if !seen_session_ids.insert(row.session_id.clone()) {
+ continue;
}
+ let workspace = resolve_workspace(&row);
+ threads.push(make_thread_entry(row, workspace));
}
- // Load threads from linked git worktrees whose
- // canonical paths belong to this group.
- let linked_worktree_queries = group
- .workspaces
- .iter()
- .flat_map(|ws| root_repository_snapshots(ws, cx))
- .filter(|snapshot| !snapshot.is_linked_worktree())
- .flat_map(|snapshot| {
- snapshot
- .linked_worktrees()
- .iter()
- .filter(|wt| {
- project_groups.group_owns_worktree(group, &path_list, &wt.path)
- })
- .map(|wt| PathList::new(std::slice::from_ref(&wt.path)))
- .collect::<Vec<_>>()
- });
+ // Legacy threads did not have `main_worktree_paths` populated, so they
+ // must be queried by their `folder_paths`.
+
+ // Load any legacy threads for the main worktrees of this project group.
+ for row in thread_store.read(cx).entries_for_path(&path_list).cloned() {
+ if !seen_session_ids.insert(row.session_id.clone()) {
+ continue;
+ }
+ let workspace = resolve_workspace(&row);
+ threads.push(make_thread_entry(row, workspace));
+ }
- for worktree_path_list in linked_worktree_queries {
+ // Load any legacy threads for any single linked wortree of this project group.
+ let mut linked_worktree_paths = HashSet::new();
+ for workspace in group_workspaces {
+ if workspace.read(cx).visible_worktrees(cx).count() != 1 {
+ continue;
+ }
+ for snapshot in root_repository_snapshots(workspace, cx) {
+ for linked_worktree in snapshot.linked_worktrees() {
+ linked_worktree_paths.insert(linked_worktree.path.clone());
+ }
+ }
+ }
+ for path in linked_worktree_paths {
+ let worktree_path_list = PathList::new(std::slice::from_ref(&path));
for row in thread_store
.read(cx)
.entries_for_path(&worktree_path_list)
@@ -894,22 +930,10 @@ impl Sidebar {
if !seen_session_ids.insert(row.session_id.clone()) {
continue;
}
- let (icon, icon_from_external_svg) = resolve_agent_icon(&row.agent_id);
- let worktrees =
- worktree_info_from_thread_paths(&row.folder_paths, &project_groups);
- threads.push(ThreadEntry {
- metadata: row,
- icon,
- icon_from_external_svg,
- status: AgentThreadStatus::default(),
- workspace: ThreadEntryWorkspace::Closed(worktree_path_list.clone()),
- is_live: false,
- is_background: false,
- is_title_generating: false,
- highlight_positions: Vec::new(),
- worktrees,
- diff_stats: DiffStats::default(),
- });
+ threads.push(make_thread_entry(
+ row,
+ ThreadEntryWorkspace::Closed(worktree_path_list.clone()),
+ ));
}
}
@@ -936,21 +960,21 @@ impl Sidebar {
let session_id = &thread.metadata.session_id;
- let is_thread_workspace_active = match &thread.workspace {
- ThreadEntryWorkspace::Open(thread_workspace) => active_workspace
- .as_ref()
- .is_some_and(|active| active == thread_workspace),
- ThreadEntryWorkspace::Closed(_) => false,
- };
+ let is_active_thread = self.active_entry.as_ref().is_some_and(|entry| {
+ entry.is_active_thread(session_id)
+ && active_workspace
+ .as_ref()
+ .is_some_and(|active| active == entry.workspace())
+ });
if thread.status == AgentThreadStatus::Completed
- && !is_thread_workspace_active
+ && !is_active_thread
&& old_statuses.get(session_id) == Some(&AgentThreadStatus::Running)
{
notified_threads.insert(session_id.clone());
}
- if is_thread_workspace_active && !thread.is_background {
+ if is_active_thread && !thread.is_background {
notified_threads.remove(session_id);
}
}
@@ -1013,9 +1037,8 @@ impl Sidebar {
project_header_indices.push(entries.len());
entries.push(ListEntry::ProjectHeader {
- path_list: path_list.clone(),
+ key: group_key.clone(),
label,
- workspace: representative_workspace.clone(),
highlight_positions: workspace_highlight_positions,
has_running_threads,
waiting_thread_count,
@@ -1027,15 +1050,13 @@ impl Sidebar {
entries.push(thread.into());
}
} else {
- let is_draft_for_workspace = is_active
- && matches!(&self.active_entry, Some(ActiveEntry::Draft(_)))
- && self.active_entry_workspace() == Some(representative_workspace);
+ let is_draft_for_group = is_active
+ && matches!(&self.active_entry, Some(ActiveEntry::Draft(ws)) if group_workspaces.contains(ws));
project_header_indices.push(entries.len());
entries.push(ListEntry::ProjectHeader {
- path_list: path_list.clone(),
+ key: group_key.clone(),
label,
- workspace: representative_workspace.clone(),
highlight_positions: Vec::new(),
has_running_threads,
waiting_thread_count,
@@ -1046,25 +1067,61 @@ impl Sidebar {
continue;
}
- // Emit "New Thread" entries for threadless workspaces
- // and active drafts, right after the header.
- for (workspace, worktrees) in &threadless_workspaces {
- entries.push(ListEntry::NewThread {
- path_list: path_list.clone(),
- workspace: workspace.clone(),
- worktrees: worktrees.clone(),
- });
+ // Emit a DraftThread entry when the active draft belongs to this group.
+ if is_draft_for_group {
+ if let Some(ActiveEntry::Draft(draft_ws)) = &self.active_entry {
+ let ws_path_list = workspace_path_list(draft_ws, cx);
+ let worktrees = worktree_info_from_thread_paths(&ws_path_list, &group_key);
+ entries.push(ListEntry::DraftThread { worktrees });
+ }
}
- if is_draft_for_workspace
- && !threadless_workspaces
- .iter()
- .any(|(ws, _)| ws == representative_workspace)
+
+ // Emit a NewThread entry when:
+ // 1. The group has zero threads (convenient affordance).
+ // 2. The active workspace has linked worktrees but no threads
+ // for the active workspace's specific set of worktrees.
+ let group_has_no_threads = threads.is_empty() && !group_workspaces.is_empty();
+ let active_ws_has_threadless_linked_worktrees = is_active
+ && !is_draft_for_group
+ && active_workspace.as_ref().is_some_and(|active_ws| {
+ let ws_path_list = workspace_path_list(active_ws, cx);
+ let has_linked_worktrees =
+ !worktree_info_from_thread_paths(&ws_path_list, &group_key).is_empty();
+ if !has_linked_worktrees {
+ return false;
+ }
+ let thread_store = ThreadMetadataStore::global(cx);
+ let has_threads_for_ws = thread_store
+ .read(cx)
+ .entries_for_path(&ws_path_list)
+ .next()
+ .is_some()
+ || thread_store
+ .read(cx)
+ .entries_for_main_worktree_path(&ws_path_list)
+ .next()
+ .is_some();
+ !has_threads_for_ws
+ });
+
+ if !is_draft_for_group
+ && (group_has_no_threads || active_ws_has_threadless_linked_worktrees)
{
- let ws_path_list = workspace_path_list(representative_workspace, cx);
- let worktrees = worktree_info_from_thread_paths(&ws_path_list, &project_groups);
+ let worktrees = if active_ws_has_threadless_linked_worktrees {
+ active_workspace
+ .as_ref()
+ .map(|ws| {
+ worktree_info_from_thread_paths(
+ &workspace_path_list(ws, cx),
+ &group_key,
+ )
+ })
+ .unwrap_or_default()
+ } else {
+ Vec::new()
+ };
entries.push(ListEntry::NewThread {
- path_list: path_list.clone(),
- workspace: representative_workspace.clone(),
+ key: group_key.clone(),
worktrees,
});
}
@@ -1110,7 +1167,7 @@ impl Sidebar {
if total > DEFAULT_THREADS_SHOWN {
entries.push(ListEntry::ViewMore {
- path_list: path_list.clone(),
+ key: group_key.clone(),
is_fully_expanded,
});
}
@@ -1198,9 +1255,8 @@ impl Sidebar {
let rendered = match entry {
ListEntry::ProjectHeader {
- path_list,
+ key,
label,
- workspace,
highlight_positions,
has_running_threads,
waiting_thread_count,
@@ -1208,9 +1264,8 @@ impl Sidebar {
} => self.render_project_header(
ix,
false,
- path_list,
+ key,
label,
- workspace,
highlight_positions,
*has_running_threads,
*waiting_thread_count,
@@ -1220,29 +1275,22 @@ impl Sidebar {
),
ListEntry::Thread(thread) => self.render_thread(ix, thread, is_active, is_selected, cx),
ListEntry::ViewMore {
- path_list,
+ key,
is_fully_expanded,
- } => self.render_view_more(ix, path_list, *is_fully_expanded, is_selected, cx),
- ListEntry::NewThread {
- path_list,
- workspace,
- worktrees,
- } => self.render_new_thread(
- ix,
- path_list,
- workspace,
- is_active,
- worktrees,
- is_selected,
- cx,
- ),
+ } => self.render_view_more(ix, key.path_list(), *is_fully_expanded, is_selected, cx),
+ ListEntry::DraftThread { worktrees, .. } => {
+ self.render_draft_thread(ix, is_active, worktrees, is_selected, cx)
+ }
+ ListEntry::NewThread { key, worktrees, .. } => {
+ self.render_new_thread(ix, key, worktrees, is_selected, cx)
+ }
};
if is_group_header_after_first {
v_flex()
.w_full()
.border_t_1()
- .border_color(cx.theme().colors().border.opacity(0.5))
+ .border_color(cx.theme().colors().border)
.child(rendered)
.into_any_element()
} else {
@@ -1253,13 +1301,9 @@ impl Sidebar {
fn render_remote_project_icon(
&self,
ix: usize,
- workspace: &Entity<Workspace>,
- cx: &mut Context<Self>,
+ host: Option<&RemoteConnectionOptions>,
) -> Option<AnyElement> {
- let project = workspace.read(cx).project().read(cx);
- let remote_connection_options = project.remote_connection_options(cx)?;
-
- let remote_icon_per_type = match remote_connection_options {
+ let remote_icon_per_type = match host? {
RemoteConnectionOptions::Wsl(_) => IconName::Linux,
RemoteConnectionOptions::Docker(_) => IconName::Box,
_ => IconName::Server,
@@ -1282,16 +1326,18 @@ impl Sidebar {
&self,
ix: usize,
is_sticky: bool,
- path_list: &PathList,
+ key: &ProjectGroupKey,
label: &SharedString,
- workspace: &Entity<Workspace>,
highlight_positions: &[usize],
has_running_threads: bool,
waiting_thread_count: usize,
is_active: bool,
- is_selected: bool,
+ is_focused: bool,
cx: &mut Context<Self>,
) -> AnyElement {
+ let path_list = key.path_list();
+ let host = key.host();
+
let id_prefix = if is_sticky { "sticky-" } else { "" };
let id = SharedString::from(format!("{id_prefix}project-header-{ix}"));
let disclosure_id = SharedString::from(format!("disclosure-{ix}"));
@@ -1304,16 +1350,15 @@ impl Sidebar {
(IconName::ChevronDown, "Collapse Project")
};
- let has_new_thread_entry = self
- .contents
- .entries
- .get(ix + 1)
- .is_some_and(|entry| matches!(entry, ListEntry::NewThread { .. }));
+ let has_new_thread_entry = self.contents.entries.get(ix + 1).is_some_and(|entry| {
+ matches!(
+ entry,
+ ListEntry::NewThread { .. } | ListEntry::DraftThread { .. }
+ )
+ });
let show_new_thread_button = !has_new_thread_entry && !self.has_filter_query(cx);
- let workspace_for_remove = workspace.clone();
- let workspace_for_menu = workspace.clone();
- let workspace_for_open = workspace.clone();
+ let workspace = self.workspace_for_group(path_list, cx);
let path_list_for_toggle = path_list.clone();
let path_list_for_collapse = path_list.clone();
@@ -1321,11 +1366,11 @@ impl Sidebar {
let label = if highlight_positions.is_empty() {
Label::new(label.clone())
- .color(Color::Muted)
+ .when(!is_active, |this| this.color(Color::Muted))
.into_any_element()
} else {
HighlightedLabel::new(label.clone(), highlight_positions.to_vec())
- .color(Color::Muted)
+ .when(!is_active, |this| this.color(Color::Muted))
.into_any_element()
};
@@ -1343,14 +1388,13 @@ impl Sidebar {
.pr_1p5()
.border_1()
.map(|this| {
- if is_selected {
+ if is_focused {
this.border_color(color.border_focused)
} else {
this.border_color(gpui::transparent_black())
}
})
.justify_between()
- .hover(|s| s.bg(hover_color))
.child(
h_flex()
.when(!is_active, |this| this.cursor_pointer())
@@ -1371,7 +1415,7 @@ impl Sidebar {
)
.child(label)
.when_some(
- self.render_remote_project_icon(ix, workspace, cx),
+ self.render_remote_project_icon(ix, host.as_ref()),
|this, icon| this.child(icon),
)
.when(is_collapsed, |this| {
@@ -1404,10 +1448,7 @@ impl Sidebar {
})
}),
)
- .child({
- let workspace_for_new_thread = workspace.clone();
- let path_list_for_new_thread = path_list.clone();
-
+ .child(
h_flex()
.when(self.project_header_menu_ix != Some(ix), |this| {
this.visible_on_hover(group_name)
@@ -1415,13 +1456,7 @@ impl Sidebar {
.on_mouse_down(gpui::MouseButton::Left, |_, _, cx| {
cx.stop_propagation();
})
- .child(self.render_project_header_menu(
- ix,
- id_prefix,
- &workspace_for_menu,
- &workspace_for_remove,
- cx,
- ))
+ .child(self.render_project_header_menu(ix, id_prefix, key, cx))
.when(view_more_expanded && !is_collapsed, |this| {
this.child(
IconButton::new(
@@ -1431,7 +1466,6 @@ impl Sidebar {
IconName::ListCollapse,
)
.icon_size(IconSize::Small)
- .icon_color(Color::Muted)
.tooltip(Tooltip::text("Collapse Displayed Threads"))
.on_click(cx.listener({
let path_list_for_collapse = path_list_for_collapse.clone();
@@ -1444,51 +1478,50 @@ impl Sidebar {
})),
)
})
- .when(show_new_thread_button, |this| {
- this.child(
- IconButton::new(
- SharedString::from(format!(
- "{id_prefix}project-header-new-thread-{ix}",
+ .when_some(
+ workspace.filter(|_| show_new_thread_button),
+ |this, workspace| {
+ let path_list = path_list.clone();
+ this.child(
+ IconButton::new(
+ SharedString::from(format!(
+ "{id_prefix}project-header-new-thread-{ix}",
+ )),
+ IconName::Plus,
+ )
+ .icon_size(IconSize::Small)
+ .tooltip(Tooltip::text("New Thread"))
+ .on_click(cx.listener(
+ move |this, _, window, cx| {
+ this.collapsed_groups.remove(&path_list);
+ this.selection = None;
+ this.create_new_thread(&workspace, window, cx);
+ },
)),
- IconName::Plus,
)
- .icon_size(IconSize::Small)
- .icon_color(Color::Muted)
- .tooltip(Tooltip::text("New Thread"))
- .on_click(cx.listener({
- let workspace_for_new_thread = workspace_for_new_thread.clone();
- let path_list_for_new_thread = path_list_for_new_thread.clone();
- move |this, _, window, cx| {
- // Uncollapse the group if collapsed so
- // the new-thread entry becomes visible.
- this.collapsed_groups.remove(&path_list_for_new_thread);
- this.selection = None;
- this.create_new_thread(&workspace_for_new_thread, window, cx);
- }
- })),
- )
- })
- })
+ },
+ ),
+ )
.when(!is_active, |this| {
- this.tooltip(Tooltip::text("Activate Workspace"))
- .on_click(cx.listener({
- move |this, _, window, cx| {
- this.active_entry =
- Some(ActiveEntry::Draft(workspace_for_open.clone()));
+ let path_list = path_list.clone();
+ this.cursor_pointer()
+ .hover(|s| s.bg(hover_color))
+ .tooltip(Tooltip::text("Open Workspace"))
+ .on_click(cx.listener(move |this, _, window, cx| {
+ if let Some(workspace) = this.workspace_for_group(&path_list, cx) {
+ this.active_entry = Some(ActiveEntry::Draft(workspace.clone()));
if let Some(multi_workspace) = this.multi_workspace.upgrade() {
multi_workspace.update(cx, |multi_workspace, cx| {
- multi_workspace.activate(
- workspace_for_open.clone(),
- window,
- cx,
- );
+ multi_workspace.activate(workspace.clone(), window, cx);
});
}
- if AgentPanel::is_visible(&workspace_for_open, cx) {
- workspace_for_open.update(cx, |workspace, cx| {
+ if AgentPanel::is_visible(&workspace, cx) {
+ workspace.update(cx, |workspace, cx| {
workspace.focus_panel::<AgentPanel>(window, cx);
});
}
+ } else {
+ this.open_workspace_for_group(&path_list, window, cx);
}
}))
})
@@ -1499,14 +1532,12 @@ impl Sidebar {
&self,
ix: usize,
id_prefix: &str,
- workspace: &Entity<Workspace>,
- workspace_for_remove: &Entity<Workspace>,
+ project_group_key: &ProjectGroupKey,
cx: &mut Context<Self>,
) -> impl IntoElement {
- let workspace_for_menu = workspace.clone();
- let workspace_for_remove = workspace_for_remove.clone();
let multi_workspace = self.multi_workspace.clone();
let this = cx.weak_entity();
+ let project_group_key = project_group_key.clone();
PopoverMenu::new(format!("{id_prefix}project-header-menu-{ix}"))
.on_open(Rc::new({
@@ -1520,116 +1551,102 @@ impl Sidebar {
}
}))
.menu(move |window, cx| {
- let workspace = workspace_for_menu.clone();
- let workspace_for_remove = workspace_for_remove.clone();
let multi_workspace = multi_workspace.clone();
+ let project_group_key = project_group_key.clone();
let menu = ContextMenu::build_persistent(window, cx, move |menu, _window, cx| {
- let worktrees: Vec<_> = workspace
- .read(cx)
- .visible_worktrees(cx)
- .map(|worktree| {
- let worktree_read = worktree.read(cx);
- let id = worktree_read.id();
- let name: SharedString =
- worktree_read.root_name().as_unix_str().to_string().into();
- (id, name)
- })
- .collect();
-
- let worktree_count = worktrees.len();
-
let mut menu = menu
.header("Project Folders")
.end_slot_action(Box::new(menu::EndSlot));
- for (worktree_id, name) in &worktrees {
- let worktree_id = *worktree_id;
- let workspace_for_worktree = workspace.clone();
- let workspace_for_remove_worktree = workspace_for_remove.clone();
- let multi_workspace_for_worktree = multi_workspace.clone();
-
- let remove_handler = move |window: &mut Window, cx: &mut App| {
- if worktree_count <= 1 {
- if let Some(mw) = multi_workspace_for_worktree.upgrade() {
- let ws = workspace_for_remove_worktree.clone();
- mw.update(cx, |multi_workspace, cx| {
- multi_workspace.remove(&ws, window, cx);
- });
- }
- } else {
- workspace_for_worktree.update(cx, |workspace, cx| {
- workspace.project().update(cx, |project, cx| {
- project.remove_worktree(worktree_id, cx);
- });
- });
- }
+ for path in project_group_key.path_list().paths() {
+ let Some(name) = path.file_name() else {
+ continue;
};
-
+ let name: SharedString = name.to_string_lossy().into_owned().into();
+ let path = path.clone();
+ let project_group_key = project_group_key.clone();
+ let multi_workspace = multi_workspace.clone();
menu = menu.entry_with_end_slot_on_hover(
name.clone(),
None,
|_, _| {},
IconName::Close,
"Remove Folder".into(),
- remove_handler,
+ move |_window, cx| {
+ multi_workspace
+ .update(cx, |multi_workspace, cx| {
+ multi_workspace.remove_folder_from_project_group(
+ &project_group_key,
+ &path,
+ cx,
+ );
+ })
+ .ok();
+ },
);
}
- let workspace_for_add = workspace.clone();
- let multi_workspace_for_add = multi_workspace.clone();
let menu = menu.separator().entry(
"Add Folder to Project",
Some(Box::new(AddFolderToProject)),
- move |window, cx| {
- if let Some(mw) = multi_workspace_for_add.upgrade() {
- mw.update(cx, |mw, cx| {
- mw.activate(workspace_for_add.clone(), window, cx);
- });
+ {
+ let project_group_key = project_group_key.clone();
+ let multi_workspace = multi_workspace.clone();
+ move |window, cx| {
+ multi_workspace
+ .update(cx, |multi_workspace, cx| {
+ multi_workspace.prompt_to_add_folders_to_project_group(
+ &project_group_key,
+ window,
+ cx,
+ );
+ })
+ .ok();
}
- workspace_for_add.update(cx, |workspace, cx| {
- workspace.add_folder_to_project(&AddFolderToProject, window, cx);
- });
},
);
- let workspace_count = multi_workspace
+ let group_count = multi_workspace
.upgrade()
- .map_or(0, |mw| mw.read(cx).workspaces().len());
- let menu = if workspace_count > 1 {
- let workspace_for_move = workspace.clone();
- let multi_workspace_for_move = multi_workspace.clone();
+ .map_or(0, |mw| mw.read(cx).project_group_keys().count());
+ let menu = if group_count > 1 {
+ let project_group_key = project_group_key.clone();
+ let multi_workspace = multi_workspace.clone();
menu.entry(
"Move to New Window",
Some(Box::new(
zed_actions::agents_sidebar::MoveWorkspaceToNewWindow,
)),
move |window, cx| {
- if let Some(mw) = multi_workspace_for_move.upgrade() {
- mw.update(cx, |multi_workspace, cx| {
- multi_workspace.move_workspace_to_new_window(
- &workspace_for_move,
+ multi_workspace
+ .update(cx, |multi_workspace, cx| {
+ multi_workspace.move_project_group_to_new_window(
+ &project_group_key,
window,
cx,
);
- });
- }
+ })
+ .ok();
},
)
} else {
menu
};
- let workspace_for_remove = workspace_for_remove.clone();
- let multi_workspace_for_remove = multi_workspace.clone();
+ let project_group_key = project_group_key.clone();
+ let multi_workspace = multi_workspace.clone();
menu.separator()
.entry("Remove Project", None, move |window, cx| {
- if let Some(mw) = multi_workspace_for_remove.upgrade() {
- let ws = workspace_for_remove.clone();
- mw.update(cx, |multi_workspace, cx| {
- multi_workspace.remove(&ws, window, cx);
- });
- }
+ multi_workspace
+ .update(cx, |multi_workspace, cx| {
+ multi_workspace.remove_project_group(
+ &project_group_key,
+ window,
+ cx,
+ );
+ })
+ .ok();
})
});
@@ -12,7 +12,10 @@ use gpui::TestAppContext;
use pretty_assertions::assert_eq;
use project::AgentId;
use settings::SettingsStore;
-use std::{path::PathBuf, sync::Arc};
+use std::{
+ path::{Path, PathBuf},
+ sync::Arc,
+};
use util::path_list::PathList;
fn init_test(cx: &mut TestAppContext) {
@@ -74,6 +77,18 @@ async fn init_test_project(
fn setup_sidebar(
multi_workspace: &Entity<MultiWorkspace>,
cx: &mut gpui::VisualTestContext,
+) -> Entity<Sidebar> {
+ let sidebar = setup_sidebar_closed(multi_workspace, cx);
+ multi_workspace.update_in(cx, |mw, window, cx| {
+ mw.toggle_sidebar(window, cx);
+ });
+ cx.run_until_parked();
+ sidebar
+}
+
+fn setup_sidebar_closed(
+ multi_workspace: &Entity<MultiWorkspace>,
+ cx: &mut gpui::VisualTestContext,
) -> Entity<Sidebar> {
let multi_workspace = multi_workspace.clone();
let sidebar =
@@ -85,14 +100,18 @@ fn setup_sidebar(
sidebar
}
-async fn save_n_test_threads(count: u32, path_list: &PathList, cx: &mut gpui::VisualTestContext) {
+async fn save_n_test_threads(
+ count: u32,
+ project: &Entity<project::Project>,
+ cx: &mut gpui::VisualTestContext,
+) {
for i in 0..count {
save_thread_metadata(
acp::SessionId::new(Arc::from(format!("thread-{}", i))),
format!("Thread {}", i + 1).into(),
chrono::TimeZone::with_ymd_and_hms(&Utc, 2024, 1, 1, 0, 0, i).unwrap(),
None,
- path_list.clone(),
+ project,
cx,
)
}
@@ -101,7 +120,7 @@ async fn save_n_test_threads(count: u32, path_list: &PathList, cx: &mut gpui::Vi
async fn save_test_thread_metadata(
session_id: &acp::SessionId,
- path_list: PathList,
+ project: &Entity<project::Project>,
cx: &mut TestAppContext,
) {
save_thread_metadata(
@@ -109,7 +128,7 @@ async fn save_test_thread_metadata(
"Test".into(),
chrono::TimeZone::with_ymd_and_hms(&Utc, 2024, 1, 1, 0, 0, 0).unwrap(),
None,
- path_list,
+ project,
cx,
)
}
@@ -117,7 +136,7 @@ async fn save_test_thread_metadata(
async fn save_named_thread_metadata(
session_id: &str,
title: &str,
- path_list: &PathList,
+ project: &Entity<project::Project>,
cx: &mut gpui::VisualTestContext,
) {
save_thread_metadata(
@@ -125,7 +144,7 @@ async fn save_named_thread_metadata(
SharedString::from(title.to_string()),
chrono::TimeZone::with_ymd_and_hms(&Utc, 2024, 1, 1, 0, 0, 0).unwrap(),
None,
- path_list.clone(),
+ project,
cx,
);
cx.run_until_parked();
@@ -136,34 +155,36 @@ fn save_thread_metadata(
title: SharedString,
updated_at: DateTime<Utc>,
created_at: Option<DateTime<Utc>>,
- path_list: PathList,
+ project: &Entity<project::Project>,
cx: &mut TestAppContext,
) {
- let metadata = ThreadMetadata {
- session_id,
- agent_id: agent::ZED_AGENT_ID.clone(),
- title,
- updated_at,
- created_at,
- folder_paths: path_list,
- archived: false,
- };
cx.update(|cx| {
- ThreadMetadataStore::global(cx).update(cx, |store, cx| store.save_manually(metadata, cx))
+ let (folder_paths, main_worktree_paths) = {
+ let project_ref = project.read(cx);
+ let paths: Vec<Arc<Path>> = project_ref
+ .visible_worktrees(cx)
+ .map(|worktree| worktree.read(cx).abs_path())
+ .collect();
+ let folder_paths = PathList::new(&paths);
+ let main_worktree_paths = project_ref.project_group_key(cx).path_list().clone();
+ (folder_paths, main_worktree_paths)
+ };
+ let metadata = ThreadMetadata {
+ session_id,
+ agent_id: agent::ZED_AGENT_ID.clone(),
+ title,
+ updated_at,
+ created_at,
+ folder_paths,
+ main_worktree_paths,
+ archived: false,
+ };
+ ThreadMetadataStore::global(cx).update(cx, |store, cx| store.save_manually(metadata, cx));
});
cx.run_until_parked();
}
-fn open_and_focus_sidebar(sidebar: &Entity<Sidebar>, cx: &mut gpui::VisualTestContext) {
- let multi_workspace = sidebar.read_with(cx, |s, _| s.multi_workspace.upgrade());
- if let Some(multi_workspace) = multi_workspace {
- multi_workspace.update_in(cx, |mw, window, cx| {
- if !mw.sidebar_open() {
- mw.toggle_sidebar(window, cx);
- }
- });
- }
- cx.run_until_parked();
+fn focus_sidebar(sidebar: &Entity<Sidebar>, cx: &mut gpui::VisualTestContext) {
sidebar.update_in(cx, |_, window, cx| {
cx.focus_self(window);
});
@@ -189,11 +210,11 @@ fn visible_entries_as_strings(
match entry {
ListEntry::ProjectHeader {
label,
- path_list,
+ key,
highlight_positions: _,
..
} => {
- let icon = if sidebar.collapsed_groups.contains(path_list) {
+ let icon = if sidebar.collapsed_groups.contains(key.path_list()) {
">"
} else {
"v"
@@ -244,6 +265,22 @@ fn visible_entries_as_strings(
format!(" + View More{}", selected)
}
}
+ ListEntry::DraftThread { worktrees, .. } => {
+ let worktree = if worktrees.is_empty() {
+ String::new()
+ } else {
+ let mut seen = Vec::new();
+ let mut chips = Vec::new();
+ for wt in worktrees {
+ if !seen.contains(&wt.name) {
+ seen.push(wt.name.clone());
+ chips.push(format!("{{{}}}", wt.name));
+ }
+ }
+ format!(" {}", chips.join(", "))
+ };
+ format!(" [~ Draft{}]{}", worktree, selected)
+ }
ListEntry::NewThread { worktrees, .. } => {
let worktree = if worktrees.is_empty() {
String::new()
@@ -270,11 +307,14 @@ fn visible_entries_as_strings(
async fn test_serialization_round_trip(cx: &mut TestAppContext) {
let project = init_test_project("/my-project", cx).await;
let (multi_workspace, cx) =
- cx.add_window_view(|window, cx| MultiWorkspace::test_new(project, window, cx));
+ cx.add_window_view(|window, cx| MultiWorkspace::test_new(project.clone(), window, cx));
let sidebar = setup_sidebar(&multi_workspace, cx);
- let path_list = PathList::new(&[std::path::PathBuf::from("/my-project")]);
- save_n_test_threads(3, &path_list, cx).await;
+ save_n_test_threads(3, &project, cx).await;
+
+ let path_list = project.read_with(cx, |project, cx| {
+ project.project_group_key(cx).path_list().clone()
+ });
// Set a custom width, collapse the group, and expand "View More".
sidebar.update_in(cx, |sidebar, window, cx| {
@@ -433,17 +473,15 @@ async fn test_single_workspace_no_threads(cx: &mut TestAppContext) {
async fn test_single_workspace_with_saved_threads(cx: &mut TestAppContext) {
let project = init_test_project("/my-project", cx).await;
let (multi_workspace, cx) =
- cx.add_window_view(|window, cx| MultiWorkspace::test_new(project, window, cx));
+ cx.add_window_view(|window, cx| MultiWorkspace::test_new(project.clone(), window, cx));
let sidebar = setup_sidebar(&multi_workspace, cx);
- let path_list = PathList::new(&[std::path::PathBuf::from("/my-project")]);
-
save_thread_metadata(
acp::SessionId::new(Arc::from("thread-1")),
"Fix crash in project panel".into(),
chrono::TimeZone::with_ymd_and_hms(&Utc, 2024, 1, 3, 0, 0, 0).unwrap(),
None,
- path_list.clone(),
+ &project,
cx,
);
@@ -452,7 +490,7 @@ async fn test_single_workspace_with_saved_threads(cx: &mut TestAppContext) {
"Add inline diff view".into(),
chrono::TimeZone::with_ymd_and_hms(&Utc, 2024, 1, 2, 0, 0, 0).unwrap(),
None,
- path_list,
+ &project,
cx,
);
cx.run_until_parked();
@@ -474,18 +512,16 @@ async fn test_single_workspace_with_saved_threads(cx: &mut TestAppContext) {
async fn test_workspace_lifecycle(cx: &mut TestAppContext) {
let project = init_test_project("/project-a", cx).await;
let (multi_workspace, cx) =
- cx.add_window_view(|window, cx| MultiWorkspace::test_new(project, window, cx));
+ cx.add_window_view(|window, cx| MultiWorkspace::test_new(project.clone(), window, cx));
let sidebar = setup_sidebar(&multi_workspace, cx);
// Single workspace with a thread
- let path_list = PathList::new(&[std::path::PathBuf::from("/project-a")]);
-
save_thread_metadata(
acp::SessionId::new(Arc::from("thread-a1")),
"Thread A1".into(),
chrono::TimeZone::with_ymd_and_hms(&Utc, 2024, 1, 1, 0, 0, 0).unwrap(),
None,
- path_list,
+ &project,
cx,
);
cx.run_until_parked();
@@ -511,7 +547,7 @@ async fn test_workspace_lifecycle(cx: &mut TestAppContext) {
// Remove the second workspace
multi_workspace.update_in(cx, |mw, window, cx| {
- let workspace = mw.workspaces()[1].clone();
+ let workspace = mw.workspaces().nth(1).cloned().unwrap();
mw.remove(&workspace, window, cx);
});
cx.run_until_parked();
@@ -526,11 +562,10 @@ async fn test_workspace_lifecycle(cx: &mut TestAppContext) {
async fn test_view_more_pagination(cx: &mut TestAppContext) {
let project = init_test_project("/my-project", cx).await;
let (multi_workspace, cx) =
- cx.add_window_view(|window, cx| MultiWorkspace::test_new(project, window, cx));
+ cx.add_window_view(|window, cx| MultiWorkspace::test_new(project.clone(), window, cx));
let sidebar = setup_sidebar(&multi_workspace, cx);
- let path_list = PathList::new(&[std::path::PathBuf::from("/my-project")]);
- save_n_test_threads(12, &path_list, cx).await;
+ save_n_test_threads(12, &project, cx).await;
multi_workspace.update_in(cx, |_, _window, cx| cx.notify());
cx.run_until_parked();
@@ -553,12 +588,15 @@ async fn test_view_more_pagination(cx: &mut TestAppContext) {
async fn test_view_more_batched_expansion(cx: &mut TestAppContext) {
let project = init_test_project("/my-project", cx).await;
let (multi_workspace, cx) =
- cx.add_window_view(|window, cx| MultiWorkspace::test_new(project, window, cx));
+ cx.add_window_view(|window, cx| MultiWorkspace::test_new(project.clone(), window, cx));
let sidebar = setup_sidebar(&multi_workspace, cx);
- let path_list = PathList::new(&[std::path::PathBuf::from("/my-project")]);
// Create 17 threads: initially shows 5, then 10, then 15, then all 17 with Collapse
- save_n_test_threads(17, &path_list, cx).await;
+ save_n_test_threads(17, &project, cx).await;
+
+ let path_list = project.read_with(cx, |project, cx| {
+ project.project_group_key(cx).path_list().clone()
+ });
multi_workspace.update_in(cx, |_, _window, cx| cx.notify());
cx.run_until_parked();
@@ -569,7 +607,7 @@ async fn test_view_more_batched_expansion(cx: &mut TestAppContext) {
assert!(entries.iter().any(|e| e.contains("View More")));
// Focus and navigate to View More, then confirm to expand by one batch
- open_and_focus_sidebar(&sidebar, cx);
+ focus_sidebar(&sidebar, cx);
for _ in 0..7 {
cx.dispatch_action(SelectNext);
}
@@ -625,11 +663,14 @@ async fn test_view_more_batched_expansion(cx: &mut TestAppContext) {
async fn test_collapse_and_expand_group(cx: &mut TestAppContext) {
let project = init_test_project("/my-project", cx).await;
let (multi_workspace, cx) =
- cx.add_window_view(|window, cx| MultiWorkspace::test_new(project, window, cx));
+ cx.add_window_view(|window, cx| MultiWorkspace::test_new(project.clone(), window, cx));
let sidebar = setup_sidebar(&multi_workspace, cx);
- let path_list = PathList::new(&[std::path::PathBuf::from("/my-project")]);
- save_n_test_threads(1, &path_list, cx).await;
+ save_n_test_threads(1, &project, cx).await;
+
+ let path_list = project.read_with(cx, |project, cx| {
+ project.project_group_key(cx).path_list().clone()
+ });
multi_workspace.update_in(cx, |_, _window, cx| cx.notify());
cx.run_until_parked();
@@ -681,9 +722,8 @@ async fn test_visible_entries_as_strings(cx: &mut TestAppContext) {
s.contents.entries = vec![
// Expanded project header
ListEntry::ProjectHeader {
- path_list: expanded_path.clone(),
+ key: project::ProjectGroupKey::new(None, expanded_path.clone()),
label: "expanded-project".into(),
- workspace: workspace.clone(),
highlight_positions: Vec::new(),
has_running_threads: false,
waiting_thread_count: 0,
@@ -694,6 +734,7 @@ async fn test_visible_entries_as_strings(cx: &mut TestAppContext) {
session_id: acp::SessionId::new(Arc::from("t-1")),
agent_id: AgentId::new("zed-agent"),
folder_paths: PathList::default(),
+ main_worktree_paths: PathList::default(),
title: "Completed thread".into(),
updated_at: Utc::now(),
created_at: Some(Utc::now()),
@@ -716,6 +757,7 @@ async fn test_visible_entries_as_strings(cx: &mut TestAppContext) {
session_id: acp::SessionId::new(Arc::from("t-2")),
agent_id: AgentId::new("zed-agent"),
folder_paths: PathList::default(),
+ main_worktree_paths: PathList::default(),
title: "Running thread".into(),
updated_at: Utc::now(),
created_at: Some(Utc::now()),
@@ -738,6 +780,7 @@ async fn test_visible_entries_as_strings(cx: &mut TestAppContext) {
session_id: acp::SessionId::new(Arc::from("t-3")),
agent_id: AgentId::new("zed-agent"),
folder_paths: PathList::default(),
+ main_worktree_paths: PathList::default(),
title: "Error thread".into(),
updated_at: Utc::now(),
created_at: Some(Utc::now()),
@@ -760,6 +803,7 @@ async fn test_visible_entries_as_strings(cx: &mut TestAppContext) {
session_id: acp::SessionId::new(Arc::from("t-4")),
agent_id: AgentId::new("zed-agent"),
folder_paths: PathList::default(),
+ main_worktree_paths: PathList::default(),
title: "Waiting thread".into(),
updated_at: Utc::now(),
created_at: Some(Utc::now()),
@@ -782,6 +826,7 @@ async fn test_visible_entries_as_strings(cx: &mut TestAppContext) {
session_id: acp::SessionId::new(Arc::from("t-5")),
agent_id: AgentId::new("zed-agent"),
folder_paths: PathList::default(),
+ main_worktree_paths: PathList::default(),
title: "Notified thread".into(),
updated_at: Utc::now(),
created_at: Some(Utc::now()),
@@ -800,14 +845,13 @@ async fn test_visible_entries_as_strings(cx: &mut TestAppContext) {
}),
// View More entry
ListEntry::ViewMore {
- path_list: expanded_path.clone(),
+ key: project::ProjectGroupKey::new(None, expanded_path.clone()),
is_fully_expanded: false,
},
// Collapsed project header
ListEntry::ProjectHeader {
- path_list: collapsed_path.clone(),
+ key: project::ProjectGroupKey::new(None, collapsed_path.clone()),
label: "collapsed-project".into(),
- workspace: workspace.clone(),
highlight_positions: Vec::new(),
has_running_threads: false,
waiting_thread_count: 0,
@@ -863,11 +907,10 @@ async fn test_visible_entries_as_strings(cx: &mut TestAppContext) {
async fn test_keyboard_select_next_and_previous(cx: &mut TestAppContext) {
let project = init_test_project("/my-project", cx).await;
let (multi_workspace, cx) =
- cx.add_window_view(|window, cx| MultiWorkspace::test_new(project, window, cx));
+ cx.add_window_view(|window, cx| MultiWorkspace::test_new(project.clone(), window, cx));
let sidebar = setup_sidebar(&multi_workspace, cx);
- let path_list = PathList::new(&[std::path::PathBuf::from("/my-project")]);
- save_n_test_threads(3, &path_list, cx).await;
+ save_n_test_threads(3, &project, cx).await;
multi_workspace.update_in(cx, |_, _window, cx| cx.notify());
cx.run_until_parked();
@@ -875,7 +918,7 @@ async fn test_keyboard_select_next_and_previous(cx: &mut TestAppContext) {
// Entries: [header, thread3, thread2, thread1]
// Focusing the sidebar does not set a selection; select_next/select_previous
// handle None gracefully by starting from the first or last entry.
- open_and_focus_sidebar(&sidebar, cx);
+ focus_sidebar(&sidebar, cx);
assert_eq!(sidebar.read_with(cx, |s, _| s.selection), None);
// First SelectNext from None starts at index 0
@@ -923,15 +966,14 @@ async fn test_keyboard_select_next_and_previous(cx: &mut TestAppContext) {
async fn test_keyboard_select_first_and_last(cx: &mut TestAppContext) {
let project = init_test_project("/my-project", cx).await;
let (multi_workspace, cx) =
- cx.add_window_view(|window, cx| MultiWorkspace::test_new(project, window, cx));
+ cx.add_window_view(|window, cx| MultiWorkspace::test_new(project.clone(), window, cx));
let sidebar = setup_sidebar(&multi_workspace, cx);
- let path_list = PathList::new(&[std::path::PathBuf::from("/my-project")]);
- save_n_test_threads(3, &path_list, cx).await;
+ save_n_test_threads(3, &project, cx).await;
multi_workspace.update_in(cx, |_, _window, cx| cx.notify());
cx.run_until_parked();
- open_and_focus_sidebar(&sidebar, cx);
+ focus_sidebar(&sidebar, cx);
// SelectLast jumps to the end
cx.dispatch_action(SelectLast);
@@ -954,7 +996,7 @@ async fn test_keyboard_focus_in_does_not_set_selection(cx: &mut TestAppContext)
// Open the sidebar so it's rendered, then focus it to trigger focus_in.
// focus_in no longer sets a default selection.
- open_and_focus_sidebar(&sidebar, cx);
+ focus_sidebar(&sidebar, cx);
assert_eq!(sidebar.read_with(cx, |s, _| s.selection), None);
// Manually set a selection, blur, then refocus — selection should be preserved
@@ -978,11 +1020,10 @@ async fn test_keyboard_focus_in_does_not_set_selection(cx: &mut TestAppContext)
async fn test_keyboard_confirm_on_project_header_toggles_collapse(cx: &mut TestAppContext) {
let project = init_test_project("/my-project", cx).await;
let (multi_workspace, cx) =
- cx.add_window_view(|window, cx| MultiWorkspace::test_new(project, window, cx));
+ cx.add_window_view(|window, cx| MultiWorkspace::test_new(project.clone(), window, cx));
let sidebar = setup_sidebar(&multi_workspace, cx);
- let path_list = PathList::new(&[std::path::PathBuf::from("/my-project")]);
- save_n_test_threads(1, &path_list, cx).await;
+ save_n_test_threads(1, &project, cx).await;
multi_workspace.update_in(cx, |_, _window, cx| cx.notify());
cx.run_until_parked();
@@ -992,7 +1033,7 @@ async fn test_keyboard_confirm_on_project_header_toggles_collapse(cx: &mut TestA
);
// Focus the sidebar and select the header (index 0)
- open_and_focus_sidebar(&sidebar, cx);
+ focus_sidebar(&sidebar, cx);
sidebar.update_in(cx, |sidebar, _window, _cx| {
sidebar.selection = Some(0);
});
@@ -1020,11 +1061,10 @@ async fn test_keyboard_confirm_on_project_header_toggles_collapse(cx: &mut TestA
async fn test_keyboard_confirm_on_view_more_expands(cx: &mut TestAppContext) {
let project = init_test_project("/my-project", cx).await;
let (multi_workspace, cx) =
- cx.add_window_view(|window, cx| MultiWorkspace::test_new(project, window, cx));
+ cx.add_window_view(|window, cx| MultiWorkspace::test_new(project.clone(), window, cx));
let sidebar = setup_sidebar(&multi_workspace, cx);
- let path_list = PathList::new(&[std::path::PathBuf::from("/my-project")]);
- save_n_test_threads(8, &path_list, cx).await;
+ save_n_test_threads(8, &project, cx).await;
multi_workspace.update_in(cx, |_, _window, cx| cx.notify());
cx.run_until_parked();
@@ -1034,7 +1074,7 @@ async fn test_keyboard_confirm_on_view_more_expands(cx: &mut TestAppContext) {
assert!(entries.iter().any(|e| e.contains("View More")));
// Focus sidebar (selection starts at None), then navigate down to the "View More" entry (index 6)
- open_and_focus_sidebar(&sidebar, cx);
+ focus_sidebar(&sidebar, cx);
for _ in 0..7 {
cx.dispatch_action(SelectNext);
}
@@ -1055,11 +1095,10 @@ async fn test_keyboard_confirm_on_view_more_expands(cx: &mut TestAppContext) {
async fn test_keyboard_expand_and_collapse_selected_entry(cx: &mut TestAppContext) {
let project = init_test_project("/my-project", cx).await;
let (multi_workspace, cx) =
- cx.add_window_view(|window, cx| MultiWorkspace::test_new(project, window, cx));
+ cx.add_window_view(|window, cx| MultiWorkspace::test_new(project.clone(), window, cx));
let sidebar = setup_sidebar(&multi_workspace, cx);
- let path_list = PathList::new(&[std::path::PathBuf::from("/my-project")]);
- save_n_test_threads(1, &path_list, cx).await;
+ save_n_test_threads(1, &project, cx).await;
multi_workspace.update_in(cx, |_, _window, cx| cx.notify());
cx.run_until_parked();
@@ -1069,7 +1108,7 @@ async fn test_keyboard_expand_and_collapse_selected_entry(cx: &mut TestAppContex
);
// Focus sidebar and manually select the header (index 0). Press left to collapse.
- open_and_focus_sidebar(&sidebar, cx);
+ focus_sidebar(&sidebar, cx);
sidebar.update_in(cx, |sidebar, _window, _cx| {
sidebar.selection = Some(0);
});
@@ -1100,16 +1139,15 @@ async fn test_keyboard_expand_and_collapse_selected_entry(cx: &mut TestAppContex
async fn test_keyboard_collapse_from_child_selects_parent(cx: &mut TestAppContext) {
let project = init_test_project("/my-project", cx).await;
let (multi_workspace, cx) =
- cx.add_window_view(|window, cx| MultiWorkspace::test_new(project, window, cx));
+ cx.add_window_view(|window, cx| MultiWorkspace::test_new(project.clone(), window, cx));
let sidebar = setup_sidebar(&multi_workspace, cx);
- let path_list = PathList::new(&[std::path::PathBuf::from("/my-project")]);
- save_n_test_threads(1, &path_list, cx).await;
+ save_n_test_threads(1, &project, cx).await;
multi_workspace.update_in(cx, |_, _window, cx| cx.notify());
cx.run_until_parked();
// Focus sidebar (selection starts at None), then navigate down to the thread (child)
- open_and_focus_sidebar(&sidebar, cx);
+ focus_sidebar(&sidebar, cx);
cx.dispatch_action(SelectNext);
cx.dispatch_action(SelectNext);
assert_eq!(sidebar.read_with(cx, |s, _| s.selection), Some(1));
@@ -1144,7 +1182,7 @@ async fn test_keyboard_navigation_on_empty_list(cx: &mut TestAppContext) {
);
// Focus sidebar — focus_in does not set a selection
- open_and_focus_sidebar(&sidebar, cx);
+ focus_sidebar(&sidebar, cx);
assert_eq!(sidebar.read_with(cx, |s, _| s.selection), None);
// First SelectNext from None starts at index 0 (header)
@@ -1168,16 +1206,15 @@ async fn test_keyboard_navigation_on_empty_list(cx: &mut TestAppContext) {
async fn test_selection_clamps_after_entry_removal(cx: &mut TestAppContext) {
let project = init_test_project("/my-project", cx).await;
let (multi_workspace, cx) =
- cx.add_window_view(|window, cx| MultiWorkspace::test_new(project, window, cx));
+ cx.add_window_view(|window, cx| MultiWorkspace::test_new(project.clone(), window, cx));
let sidebar = setup_sidebar(&multi_workspace, cx);
- let path_list = PathList::new(&[std::path::PathBuf::from("/my-project")]);
- save_n_test_threads(1, &path_list, cx).await;
+ save_n_test_threads(1, &project, cx).await;
multi_workspace.update_in(cx, |_, _window, cx| cx.notify());
cx.run_until_parked();
// Focus sidebar (selection starts at None), navigate down to the thread (index 1)
- open_and_focus_sidebar(&sidebar, cx);
+ focus_sidebar(&sidebar, cx);
cx.dispatch_action(SelectNext);
cx.dispatch_action(SelectNext);
assert_eq!(sidebar.read_with(cx, |s, _| s.selection), Some(1));
@@ -1245,15 +1282,13 @@ async fn test_parallel_threads_shown_with_live_status(cx: &mut TestAppContext) {
cx.add_window_view(|window, cx| MultiWorkspace::test_new(project.clone(), window, cx));
let (sidebar, panel) = setup_sidebar_with_agent_panel(&multi_workspace, cx);
- let path_list = PathList::new(&[std::path::PathBuf::from("/my-project")]);
-
// Open thread A and keep it generating.
let connection = StubAgentConnection::new();
open_thread_with_connection(&panel, connection.clone(), cx);
send_message(&panel, cx);
let session_id_a = active_session_id(&panel, cx);
- save_test_thread_metadata(&session_id_a, path_list.clone(), cx).await;
+ save_test_thread_metadata(&session_id_a, &project, cx).await;
cx.update(|_, cx| {
connection.send_update(
@@ -1272,7 +1307,7 @@ async fn test_parallel_threads_shown_with_live_status(cx: &mut TestAppContext) {
send_message(&panel, cx);
let session_id_b = active_session_id(&panel, cx);
- save_test_thread_metadata(&session_id_b, path_list.clone(), cx).await;
+ save_test_thread_metadata(&session_id_b, &project, cx).await;
cx.run_until_parked();
@@ -1291,15 +1326,13 @@ async fn test_background_thread_completion_triggers_notification(cx: &mut TestAp
cx.add_window_view(|window, cx| MultiWorkspace::test_new(project_a.clone(), window, cx));
let (sidebar, panel_a) = setup_sidebar_with_agent_panel(&multi_workspace, cx);
- let path_list_a = PathList::new(&[std::path::PathBuf::from("/project-a")]);
-
// Open thread on workspace A and keep it generating.
let connection_a = StubAgentConnection::new();
open_thread_with_connection(&panel_a, connection_a.clone(), cx);
send_message(&panel_a, cx);
let session_id_a = active_session_id(&panel_a, cx);
- save_test_thread_metadata(&session_id_a, path_list_a.clone(), cx).await;
+ save_test_thread_metadata(&session_id_a, &project_a, cx).await;
cx.update(|_, cx| {
connection_a.send_update(
@@ -1349,11 +1382,9 @@ fn type_in_search(sidebar: &Entity<Sidebar>, query: &str, cx: &mut gpui::VisualT
async fn test_search_narrows_visible_threads_to_matches(cx: &mut TestAppContext) {
let project = init_test_project("/my-project", cx).await;
let (multi_workspace, cx) =
- cx.add_window_view(|window, cx| MultiWorkspace::test_new(project, window, cx));
+ cx.add_window_view(|window, cx| MultiWorkspace::test_new(project.clone(), window, cx));
let sidebar = setup_sidebar(&multi_workspace, cx);
- let path_list = PathList::new(&[std::path::PathBuf::from("/my-project")]);
-
for (id, title, hour) in [
("t-1", "Fix crash in project panel", 3),
("t-2", "Add inline diff view", 2),
@@ -1364,7 +1395,7 @@ async fn test_search_narrows_visible_threads_to_matches(cx: &mut TestAppContext)
title.into(),
chrono::TimeZone::with_ymd_and_hms(&Utc, 2024, 1, 1, hour, 0, 0).unwrap(),
None,
- path_list.clone(),
+ &project,
cx,
);
}
@@ -1402,17 +1433,15 @@ async fn test_search_matches_regardless_of_case(cx: &mut TestAppContext) {
// Search should match case-insensitively so they can still find it.
let project = init_test_project("/my-project", cx).await;
let (multi_workspace, cx) =
- cx.add_window_view(|window, cx| MultiWorkspace::test_new(project, window, cx));
+ cx.add_window_view(|window, cx| MultiWorkspace::test_new(project.clone(), window, cx));
let sidebar = setup_sidebar(&multi_workspace, cx);
- let path_list = PathList::new(&[std::path::PathBuf::from("/my-project")]);
-
save_thread_metadata(
acp::SessionId::new(Arc::from("thread-1")),
"Fix Crash In Project Panel".into(),
chrono::TimeZone::with_ymd_and_hms(&Utc, 2024, 1, 1, 0, 0, 0).unwrap(),
None,
- path_list,
+ &project,
cx,
);
cx.run_until_parked();
@@ -1444,18 +1473,16 @@ async fn test_escape_clears_search_and_restores_full_list(cx: &mut TestAppContex
// to dismiss the filter and see the full list again.
let project = init_test_project("/my-project", cx).await;
let (multi_workspace, cx) =
- cx.add_window_view(|window, cx| MultiWorkspace::test_new(project, window, cx));
+ cx.add_window_view(|window, cx| MultiWorkspace::test_new(project.clone(), window, cx));
let sidebar = setup_sidebar(&multi_workspace, cx);
- let path_list = PathList::new(&[std::path::PathBuf::from("/my-project")]);
-
for (id, title, hour) in [("t-1", "Alpha thread", 2), ("t-2", "Beta thread", 1)] {
save_thread_metadata(
acp::SessionId::new(Arc::from(id)),
title.into(),
chrono::TimeZone::with_ymd_and_hms(&Utc, 2024, 1, 1, hour, 0, 0).unwrap(),
None,
- path_list.clone(),
+ &project,
cx,
)
}
@@ -1468,7 +1495,7 @@ async fn test_escape_clears_search_and_restores_full_list(cx: &mut TestAppContex
);
// User types a search query to filter down.
- open_and_focus_sidebar(&sidebar, cx);
+ focus_sidebar(&sidebar, cx);
type_in_search(&sidebar, "alpha", cx);
assert_eq!(
visible_entries_as_strings(&sidebar, cx),
@@ -1493,11 +1520,9 @@ async fn test_escape_clears_search_and_restores_full_list(cx: &mut TestAppContex
async fn test_search_only_shows_workspace_headers_with_matches(cx: &mut TestAppContext) {
let project_a = init_test_project("/project-a", cx).await;
let (multi_workspace, cx) =
- cx.add_window_view(|window, cx| MultiWorkspace::test_new(project_a, window, cx));
+ cx.add_window_view(|window, cx| MultiWorkspace::test_new(project_a.clone(), window, cx));
let sidebar = setup_sidebar(&multi_workspace, cx);
- let path_list_a = PathList::new(&[std::path::PathBuf::from("/project-a")]);
-
for (id, title, hour) in [
("a1", "Fix bug in sidebar", 2),
("a2", "Add tests for editor", 1),
@@ -1507,7 +1532,7 @@ async fn test_search_only_shows_workspace_headers_with_matches(cx: &mut TestAppC
title.into(),
chrono::TimeZone::with_ymd_and_hms(&Utc, 2024, 1, 1, hour, 0, 0).unwrap(),
None,
- path_list_a.clone(),
+ &project_a,
cx,
)
}
@@ -1518,7 +1543,9 @@ async fn test_search_only_shows_workspace_headers_with_matches(cx: &mut TestAppC
});
cx.run_until_parked();
- let path_list_b = PathList::new::<std::path::PathBuf>(&[]);
+ let project_b = multi_workspace.read_with(cx, |mw, cx| {
+ mw.workspaces().nth(1).unwrap().read(cx).project().clone()
+ });
for (id, title, hour) in [
("b1", "Refactor sidebar layout", 3),
@@ -1529,7 +1556,7 @@ async fn test_search_only_shows_workspace_headers_with_matches(cx: &mut TestAppC
title.into(),
chrono::TimeZone::with_ymd_and_hms(&Utc, 2024, 1, 1, hour, 0, 0).unwrap(),
None,
- path_list_b.clone(),
+ &project_b,
cx,
)
}
@@ -1575,11 +1602,9 @@ async fn test_search_only_shows_workspace_headers_with_matches(cx: &mut TestAppC
async fn test_search_matches_workspace_name(cx: &mut TestAppContext) {
let project_a = init_test_project("/alpha-project", cx).await;
let (multi_workspace, cx) =
- cx.add_window_view(|window, cx| MultiWorkspace::test_new(project_a, window, cx));
+ cx.add_window_view(|window, cx| MultiWorkspace::test_new(project_a.clone(), window, cx));
let sidebar = setup_sidebar(&multi_workspace, cx);
- let path_list_a = PathList::new(&[std::path::PathBuf::from("/alpha-project")]);
-
for (id, title, hour) in [
("a1", "Fix bug in sidebar", 2),
("a2", "Add tests for editor", 1),
@@ -1589,7 +1614,7 @@ async fn test_search_matches_workspace_name(cx: &mut TestAppContext) {
title.into(),
chrono::TimeZone::with_ymd_and_hms(&Utc, 2024, 1, 1, hour, 0, 0).unwrap(),
None,
- path_list_a.clone(),
+ &project_a,
cx,
)
}
@@ -1600,7 +1625,9 @@ async fn test_search_matches_workspace_name(cx: &mut TestAppContext) {
});
cx.run_until_parked();
- let path_list_b = PathList::new::<std::path::PathBuf>(&[]);
+ let project_b = multi_workspace.read_with(cx, |mw, cx| {
+ mw.workspaces().nth(1).unwrap().read(cx).project().clone()
+ });
for (id, title, hour) in [
("b1", "Refactor sidebar layout", 3),
@@ -1611,7 +1638,7 @@ async fn test_search_matches_workspace_name(cx: &mut TestAppContext) {
title.into(),
chrono::TimeZone::with_ymd_and_hms(&Utc, 2024, 1, 1, hour, 0, 0).unwrap(),
None,
- path_list_b.clone(),
+ &project_b,
cx,
)
}
@@ -1677,11 +1704,9 @@ async fn test_search_matches_workspace_name(cx: &mut TestAppContext) {
async fn test_search_finds_threads_hidden_behind_view_more(cx: &mut TestAppContext) {
let project = init_test_project("/my-project", cx).await;
let (multi_workspace, cx) =
- cx.add_window_view(|window, cx| MultiWorkspace::test_new(project, window, cx));
+ cx.add_window_view(|window, cx| MultiWorkspace::test_new(project.clone(), window, cx));
let sidebar = setup_sidebar(&multi_workspace, cx);
- let path_list = PathList::new(&[std::path::PathBuf::from("/my-project")]);
-
// Create 8 threads. The oldest one has a unique name and will be
// behind View More (only 5 shown by default).
for i in 0..8u32 {
@@ -1695,7 +1720,7 @@ async fn test_search_finds_threads_hidden_behind_view_more(cx: &mut TestAppConte
title.into(),
chrono::TimeZone::with_ymd_and_hms(&Utc, 2024, 1, 1, 0, 0, i).unwrap(),
None,
- path_list.clone(),
+ &project,
cx,
)
}
@@ -1729,24 +1754,22 @@ async fn test_search_finds_threads_hidden_behind_view_more(cx: &mut TestAppConte
async fn test_search_finds_threads_inside_collapsed_groups(cx: &mut TestAppContext) {
let project = init_test_project("/my-project", cx).await;
let (multi_workspace, cx) =
- cx.add_window_view(|window, cx| MultiWorkspace::test_new(project, window, cx));
+ cx.add_window_view(|window, cx| MultiWorkspace::test_new(project.clone(), window, cx));
let sidebar = setup_sidebar(&multi_workspace, cx);
- let path_list = PathList::new(&[std::path::PathBuf::from("/my-project")]);
-
save_thread_metadata(
acp::SessionId::new(Arc::from("thread-1")),
"Important thread".into(),
chrono::TimeZone::with_ymd_and_hms(&Utc, 2024, 1, 1, 0, 0, 0).unwrap(),
None,
- path_list,
+ &project,
cx,
);
cx.run_until_parked();
// User focuses the sidebar and collapses the group using keyboard:
// manually select the header, then press SelectParent to collapse.
- open_and_focus_sidebar(&sidebar, cx);
+ focus_sidebar(&sidebar, cx);
sidebar.update_in(cx, |sidebar, _window, _cx| {
sidebar.selection = Some(0);
});
@@ -1770,11 +1793,9 @@ async fn test_search_finds_threads_inside_collapsed_groups(cx: &mut TestAppConte
async fn test_search_then_keyboard_navigate_and_confirm(cx: &mut TestAppContext) {
let project = init_test_project("/my-project", cx).await;
let (multi_workspace, cx) =
- cx.add_window_view(|window, cx| MultiWorkspace::test_new(project, window, cx));
+ cx.add_window_view(|window, cx| MultiWorkspace::test_new(project.clone(), window, cx));
let sidebar = setup_sidebar(&multi_workspace, cx);
- let path_list = PathList::new(&[std::path::PathBuf::from("/my-project")]);
-
for (id, title, hour) in [
("t-1", "Fix crash in panel", 3),
("t-2", "Fix lint warnings", 2),
@@ -1785,13 +1806,13 @@ async fn test_search_then_keyboard_navigate_and_confirm(cx: &mut TestAppContext)
title.into(),
chrono::TimeZone::with_ymd_and_hms(&Utc, 2024, 1, 1, hour, 0, 0).unwrap(),
None,
- path_list.clone(),
+ &project,
cx,
)
}
cx.run_until_parked();
- open_and_focus_sidebar(&sidebar, cx);
+ focus_sidebar(&sidebar, cx);
// User types "fix" — two threads match.
type_in_search(&sidebar, "fix", cx);
@@ -1832,7 +1853,7 @@ async fn test_search_then_keyboard_navigate_and_confirm(cx: &mut TestAppContext)
async fn test_confirm_on_historical_thread_activates_workspace(cx: &mut TestAppContext) {
let project = init_test_project("/my-project", cx).await;
let (multi_workspace, cx) =
- cx.add_window_view(|window, cx| MultiWorkspace::test_new(project, window, cx));
+ cx.add_window_view(|window, cx| MultiWorkspace::test_new(project.clone(), window, cx));
let sidebar = setup_sidebar(&multi_workspace, cx);
multi_workspace.update_in(cx, |mw, window, cx| {
@@ -1840,14 +1861,19 @@ async fn test_confirm_on_historical_thread_activates_workspace(cx: &mut TestAppC
});
cx.run_until_parked();
- let path_list = PathList::new(&[std::path::PathBuf::from("/my-project")]);
+ let (workspace_0, workspace_1) = multi_workspace.read_with(cx, |mw, _| {
+ (
+ mw.workspaces().next().unwrap().clone(),
+ mw.workspaces().nth(1).unwrap().clone(),
+ )
+ });
save_thread_metadata(
acp::SessionId::new(Arc::from("hist-1")),
"Historical Thread".into(),
chrono::TimeZone::with_ymd_and_hms(&Utc, 2024, 6, 1, 0, 0, 0).unwrap(),
None,
- path_list,
+ &project,
cx,
);
cx.run_until_parked();
@@ -1861,13 +1887,13 @@ async fn test_confirm_on_historical_thread_activates_workspace(cx: &mut TestAppC
// Switch to workspace 1 so we can verify the confirm switches back.
multi_workspace.update_in(cx, |mw, window, cx| {
- let workspace = mw.workspaces()[1].clone();
+ let workspace = mw.workspaces().nth(1).unwrap().clone();
mw.activate(workspace, window, cx);
});
cx.run_until_parked();
assert_eq!(
- multi_workspace.read_with(cx, |mw, _| mw.active_workspace_index()),
- 1
+ multi_workspace.read_with(cx, |mw, _| mw.workspace().clone()),
+ workspace_1
);
// Confirm on the historical (non-live) thread at index 1.
@@ -1881,8 +1907,8 @@ async fn test_confirm_on_historical_thread_activates_workspace(cx: &mut TestAppC
cx.run_until_parked();
assert_eq!(
- multi_workspace.read_with(cx, |mw, _| mw.active_workspace_index()),
- 0
+ multi_workspace.read_with(cx, |mw, _| mw.workspace().clone()),
+ workspace_0
);
}
@@ -1890,17 +1916,15 @@ async fn test_confirm_on_historical_thread_activates_workspace(cx: &mut TestAppC
async fn test_click_clears_selection_and_focus_in_restores_it(cx: &mut TestAppContext) {
let project = init_test_project("/my-project", cx).await;
let (multi_workspace, cx) =
- cx.add_window_view(|window, cx| MultiWorkspace::test_new(project, window, cx));
+ cx.add_window_view(|window, cx| MultiWorkspace::test_new(project.clone(), window, cx));
let sidebar = setup_sidebar(&multi_workspace, cx);
- let path_list = PathList::new(&[std::path::PathBuf::from("/my-project")]);
-
save_thread_metadata(
acp::SessionId::new(Arc::from("t-1")),
"Thread A".into(),
chrono::TimeZone::with_ymd_and_hms(&Utc, 2024, 1, 2, 0, 0, 0).unwrap(),
None,
- path_list.clone(),
+ &project,
cx,
);
@@ -1909,7 +1933,7 @@ async fn test_click_clears_selection_and_focus_in_restores_it(cx: &mut TestAppCo
"Thread B".into(),
chrono::TimeZone::with_ymd_and_hms(&Utc, 2024, 1, 1, 0, 0, 0).unwrap(),
None,
- path_list,
+ &project,
cx,
);
@@ -1957,8 +1981,6 @@ async fn test_thread_title_update_propagates_to_sidebar(cx: &mut TestAppContext)
cx.add_window_view(|window, cx| MultiWorkspace::test_new(project.clone(), window, cx));
let (sidebar, panel) = setup_sidebar_with_agent_panel(&multi_workspace, cx);
- let path_list = PathList::new(&[std::path::PathBuf::from("/my-project")]);
-
let connection = StubAgentConnection::new();
connection.set_next_prompt_updates(vec![acp::SessionUpdate::AgentMessageChunk(
acp::ContentChunk::new("Hi there!".into()),
@@ -1967,7 +1989,7 @@ async fn test_thread_title_update_propagates_to_sidebar(cx: &mut TestAppContext)
send_message(&panel, cx);
let session_id = active_session_id(&panel, cx);
- save_test_thread_metadata(&session_id, path_list.clone(), cx).await;
+ save_test_thread_metadata(&session_id, &project, cx).await;
cx.run_until_parked();
assert_eq!(
@@ -2005,8 +2027,6 @@ async fn test_focused_thread_tracks_user_intent(cx: &mut TestAppContext) {
cx.add_window_view(|window, cx| MultiWorkspace::test_new(project_a.clone(), window, cx));
let (sidebar, panel_a) = setup_sidebar_with_agent_panel(&multi_workspace, cx);
- let path_list_a = PathList::new(&[std::path::PathBuf::from("/project-a")]);
-
// Save a thread so it appears in the list.
let connection_a = StubAgentConnection::new();
connection_a.set_next_prompt_updates(vec![acp::SessionUpdate::AgentMessageChunk(
@@ -2015,7 +2035,7 @@ async fn test_focused_thread_tracks_user_intent(cx: &mut TestAppContext) {
open_thread_with_connection(&panel_a, connection_a, cx);
send_message(&panel_a, cx);
let session_id_a = active_session_id(&panel_a, cx);
- save_test_thread_metadata(&session_id_a, path_list_a.clone(), cx).await;
+ save_test_thread_metadata(&session_id_a, &project_a, cx).await;
// Add a second workspace with its own agent panel.
let fs = cx.update(|_, cx| <dyn fs::Fs>::global(cx));
@@ -2029,7 +2049,8 @@ async fn test_focused_thread_tracks_user_intent(cx: &mut TestAppContext) {
let panel_b = add_agent_panel(&workspace_b, cx);
cx.run_until_parked();
- let workspace_a = multi_workspace.read_with(cx, |mw, _cx| mw.workspaces()[0].clone());
+ let workspace_a =
+ multi_workspace.read_with(cx, |mw, _cx| mw.workspaces().next().unwrap().clone());
// ── 1. Initial state: focused thread derived from active panel ─────
sidebar.read_with(cx, |sidebar, _cx| {
@@ -2049,6 +2070,7 @@ async fn test_focused_thread_tracks_user_intent(cx: &mut TestAppContext) {
updated_at: Utc::now(),
created_at: None,
folder_paths: PathList::default(),
+ main_worktree_paths: PathList::default(),
archived: false,
},
&workspace_a,
@@ -2089,8 +2111,7 @@ async fn test_focused_thread_tracks_user_intent(cx: &mut TestAppContext) {
open_thread_with_connection(&panel_b, connection_b, cx);
send_message(&panel_b, cx);
let session_id_b = active_session_id(&panel_b, cx);
- let path_list_b = PathList::new(&[std::path::PathBuf::from("/project-b")]);
- save_test_thread_metadata(&session_id_b, path_list_b.clone(), cx).await;
+ save_test_thread_metadata(&session_id_b, &project_b, cx).await;
cx.run_until_parked();
// Workspace A is currently active. Click a thread in workspace B,
@@ -2104,6 +2125,7 @@ async fn test_focused_thread_tracks_user_intent(cx: &mut TestAppContext) {
updated_at: Utc::now(),
created_at: None,
folder_paths: PathList::default(),
+ main_worktree_paths: PathList::default(),
archived: false,
},
&workspace_b,
@@ -2126,7 +2148,7 @@ async fn test_focused_thread_tracks_user_intent(cx: &mut TestAppContext) {
});
multi_workspace.update_in(cx, |mw, window, cx| {
- let workspace = mw.workspaces()[0].clone();
+ let workspace = mw.workspaces().next().unwrap().clone();
mw.activate(workspace, window, cx);
});
cx.run_until_parked();
@@ -23,8 +23,8 @@ pub use debug_format::{
Request, TcpArgumentsTemplate, ZedDebugConfig,
};
pub use task_template::{
- DebugArgsRequest, HideStrategy, RevealStrategy, SaveStrategy, TaskTemplate, TaskTemplates,
- substitute_variables_in_map, substitute_variables_in_str,
+ DebugArgsRequest, HideStrategy, RevealStrategy, SaveStrategy, TaskHook, TaskTemplate,
+ TaskTemplates, substitute_variables_in_map, substitute_variables_in_str,
};
pub use util::shell::{Shell, ShellKind};
pub use util::shell_builder::ShellBuilder;
@@ -181,6 +181,10 @@ pub enum VariableName {
/// Open a Picker to select a process ID to use in place
/// Can only be used to debug configurations
PickProcessId,
+ /// An absolute path of the main (original) git worktree for the current repository.
+ /// For normal checkouts, this equals the worktree root. For linked worktrees,
+ /// this is the original repo's working directory.
+ MainGitWorktree,
/// Custom variable, provided by the plugin or other external source.
/// Will be printed with `CUSTOM_` prefix to avoid potential conflicts with other variables.
Custom(Cow<'static, str>),
@@ -216,6 +220,7 @@ impl FromStr for VariableName {
"LANGUAGE" => Self::Language,
"ROW" => Self::Row,
"COLUMN" => Self::Column,
+ "MAIN_GIT_WORKTREE" => Self::MainGitWorktree,
_ => {
if let Some(custom_name) =
without_prefix.strip_prefix(ZED_CUSTOM_VARIABLE_NAME_PREFIX)
@@ -251,6 +256,7 @@ impl std::fmt::Display for VariableName {
Self::Language => write!(f, "{ZED_VARIABLE_NAME_PREFIX}LANGUAGE"),
Self::RunnableSymbol => write!(f, "{ZED_VARIABLE_NAME_PREFIX}RUNNABLE_SYMBOL"),
Self::PickProcessId => write!(f, "{ZED_VARIABLE_NAME_PREFIX}PICK_PID"),
+ Self::MainGitWorktree => write!(f, "{ZED_VARIABLE_NAME_PREFIX}MAIN_GIT_WORKTREE"),
Self::Custom(s) => write!(
f,
"{ZED_VARIABLE_NAME_PREFIX}{ZED_CUSTOM_VARIABLE_NAME_PREFIX}{s}"
@@ -75,6 +75,9 @@ pub struct TaskTemplate {
/// Which edited buffers to save before running the task.
#[serde(default)]
pub save: SaveStrategy,
+ /// Hooks that this task runs when emitted.
+ #[serde(default)]
+ pub hooks: HashSet<TaskHook>,
}
#[derive(Deserialize, Eq, PartialEq, Clone, Debug)]
@@ -86,6 +89,14 @@ pub enum DebugArgsRequest {
Attach(AttachRequest),
}
+/// What to do with the terminal pane and tab, after the command was started.
+#[derive(Clone, Copy, Debug, PartialEq, Hash, Eq, Serialize, Deserialize, JsonSchema)]
+#[serde(rename_all = "snake_case")]
+pub enum TaskHook {
+ #[serde(alias = "create_git_worktree")]
+ CreateWorktree,
+}
+
/// What to do with the terminal pane and tab, after the command was started.
#[derive(Default, Clone, Copy, Debug, PartialEq, Eq, Serialize, Deserialize, JsonSchema)]
#[serde(rename_all = "snake_case")]
@@ -116,11 +127,11 @@ pub enum HideStrategy {
#[derive(Default, Clone, Copy, Debug, PartialEq, Eq, Serialize, Deserialize, JsonSchema)]
#[serde(rename_all = "snake_case")]
pub enum SaveStrategy {
- #[default]
/// Save all edited buffers.
All,
/// Save the current buffer.
Current,
+ #[default]
/// Don't save any buffers.
None,
}
@@ -321,13 +321,11 @@ pub fn task_contexts(
})
.unwrap_or_default();
- let latest_selection = active_editor.as_ref().map(|active_editor| {
- active_editor
- .read(cx)
- .selections
- .newest_anchor()
- .head()
- .text_anchor
+ let latest_selection = active_editor.as_ref().and_then(|active_editor| {
+ let snapshot = active_editor.read(cx).buffer().read(cx).snapshot(cx);
+ snapshot
+ .anchor_to_buffer_anchor(active_editor.read(cx).selections.newest_anchor().head())
+ .map(|(anchor, _)| anchor)
});
let mut worktree_abs_paths = workspace
@@ -436,7 +434,9 @@ mod tests {
)
.await;
let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
- let worktree_store = project.read_with(cx, |project, _| project.worktree_store());
+ let (worktree_store, git_store) = project.read_with(cx, |project, _| {
+ (project.worktree_store(), project.git_store().clone())
+ });
let rust_language = Arc::new(
Language::new(
LanguageConfig {
@@ -453,6 +453,7 @@ mod tests {
.unwrap()
.with_context_provider(Some(Arc::new(BasicContextProvider::new(
worktree_store.clone(),
+ git_store.clone(),
)))),
);
@@ -476,6 +477,7 @@ mod tests {
.unwrap()
.with_context_provider(Some(Arc::new(BasicContextProvider::new(
worktree_store.clone(),
+ git_store.clone(),
)))),
);
@@ -850,6 +850,7 @@ impl TerminalView {
fn send_text(&mut self, text: &SendText, _: &mut Window, cx: &mut Context<Self>) {
self.clear_bell(cx);
+ self.blink_manager.update(cx, BlinkManager::pause_blinking);
self.terminal.update(cx, |term, _| {
term.input(text.0.to_string().into_bytes());
});
@@ -858,6 +859,7 @@ impl TerminalView {
fn send_keystroke(&mut self, text: &SendKeystroke, _: &mut Window, cx: &mut Context<Self>) {
if let Some(keystroke) = Keystroke::parse(&text.0).log_err() {
self.clear_bell(cx);
+ self.blink_manager.update(cx, BlinkManager::pause_blinking);
self.process_keystroke(&keystroke, cx);
}
}
@@ -1820,6 +1822,7 @@ impl SearchableItem for TerminalView {
regex: true,
replacement: false,
selection: false,
+ select_all: false,
find_in_results: false,
}
}
@@ -24,7 +24,7 @@ pub struct Anchor {
/// Whether this anchor stays attached to the character *before* or *after*
/// the offset.
pub bias: Bias,
- pub buffer_id: Option<BufferId>,
+ pub buffer_id: BufferId,
}
impl Debug for Anchor {
@@ -46,28 +46,7 @@ impl Debug for Anchor {
}
impl Anchor {
- pub const MIN: Self = Self {
- timestamp_replica_id: clock::Lamport::MIN.replica_id,
- timestamp_value: clock::Lamport::MIN.value,
- offset: u32::MIN,
- bias: Bias::Left,
- buffer_id: None,
- };
-
- pub const MAX: Self = Self {
- timestamp_replica_id: clock::Lamport::MAX.replica_id,
- timestamp_value: clock::Lamport::MAX.value,
- offset: u32::MAX,
- bias: Bias::Right,
- buffer_id: None,
- };
-
- pub fn new(
- timestamp: clock::Lamport,
- offset: u32,
- bias: Bias,
- buffer_id: Option<BufferId>,
- ) -> Self {
+ pub fn new(timestamp: clock::Lamport, offset: u32, bias: Bias, buffer_id: BufferId) -> Self {
Self {
timestamp_replica_id: timestamp.replica_id,
timestamp_value: timestamp.value,
@@ -83,7 +62,7 @@ impl Anchor {
timestamp_value: clock::Lamport::MIN.value,
offset: u32::MIN,
bias: Bias::Left,
- buffer_id: Some(buffer_id),
+ buffer_id,
}
}
@@ -93,7 +72,7 @@ impl Anchor {
timestamp_value: clock::Lamport::MAX.value,
offset: u32::MAX,
bias: Bias::Right,
- buffer_id: Some(buffer_id),
+ buffer_id,
}
}
@@ -171,7 +150,7 @@ impl Anchor {
pub fn is_valid(&self, buffer: &BufferSnapshot) -> bool {
if self.is_min() || self.is_max() {
true
- } else if self.buffer_id.is_none_or(|id| id != buffer.remote_id) {
+ } else if self.buffer_id != buffer.remote_id {
false
} else {
let Some(fragment_id) = buffer.try_fragment_id_for_anchor(self) else {
@@ -207,6 +186,18 @@ impl Anchor {
value: self.timestamp_value,
}
}
+
+ pub fn opaque_id(&self) -> [u8; 20] {
+ let mut bytes = [0u8; 20];
+ let buffer_id: u64 = self.buffer_id.into();
+ bytes[0..8].copy_from_slice(&buffer_id.to_le_bytes());
+ bytes[8..12].copy_from_slice(&self.offset.to_le_bytes());
+ bytes[12..16].copy_from_slice(&self.timestamp_value.to_le_bytes());
+ let replica_id = self.timestamp_replica_id.as_u16();
+ bytes[16..18].copy_from_slice(&replica_id.to_le_bytes());
+ bytes[18] = self.bias as u8;
+ bytes
+ }
}
pub trait OffsetRangeExt {
@@ -237,6 +228,7 @@ where
pub trait AnchorRangeExt {
fn cmp(&self, b: &Range<Anchor>, buffer: &BufferSnapshot) -> Ordering;
fn overlaps(&self, b: &Range<Anchor>, buffer: &BufferSnapshot) -> bool;
+ fn contains_anchor(&self, b: Anchor, buffer: &BufferSnapshot) -> bool;
}
impl AnchorRangeExt for Range<Anchor> {
@@ -250,4 +242,8 @@ impl AnchorRangeExt for Range<Anchor> {
fn overlaps(&self, other: &Range<Anchor>, buffer: &BufferSnapshot) -> bool {
self.start.cmp(&other.end, buffer).is_lt() && other.start.cmp(&self.end, buffer).is_lt()
}
+
+ fn contains_anchor(&self, other: Anchor, buffer: &BufferSnapshot) -> bool {
+ self.start.cmp(&other, buffer).is_le() && self.end.cmp(&other, buffer).is_ge()
+ }
}
@@ -56,7 +56,10 @@ where
if edit.is_empty() {
return;
}
+ self.push_maybe_empty(edit);
+ }
+ pub fn push_maybe_empty(&mut self, edit: Edit<T>) {
if let Some(last) = self.0.last_mut() {
if last.old.end >= edit.old.start {
last.old.end = edit.old.end;
@@ -2377,7 +2377,7 @@ impl BufferSnapshot {
pub fn summaries_for_anchors<'a, D, A>(&'a self, anchors: A) -> impl 'a + Iterator<Item = D>
where
D: 'a + TextDimension,
- A: 'a + IntoIterator<Item = &'a Anchor>,
+ A: 'a + IntoIterator<Item = Anchor>,
{
let anchors = anchors.into_iter();
self.summaries_for_anchors_with_payload::<D, _, ()>(anchors.map(|a| (a, ())))
@@ -2390,7 +2390,7 @@ impl BufferSnapshot {
) -> impl 'a + Iterator<Item = (D, T)>
where
D: 'a + TextDimension,
- A: 'a + IntoIterator<Item = (&'a Anchor, T)>,
+ A: 'a + IntoIterator<Item = (Anchor, T)>,
{
let anchors = anchors.into_iter();
let mut fragment_cursor = self
@@ -2406,7 +2406,7 @@ impl BufferSnapshot {
return (D::from_text_summary(&self.visible_text.summary()), payload);
}
- let Some(insertion) = self.try_find_fragment(anchor) else {
+ let Some(insertion) = self.try_find_fragment(&anchor) else {
panic!(
"invalid insertion for buffer {}@{:?} with anchor {:?}",
self.remote_id(),
@@ -2457,7 +2457,7 @@ impl BufferSnapshot {
} else if anchor.is_max() {
self.visible_text.len()
} else {
- debug_assert_eq!(anchor.buffer_id, Some(self.remote_id));
+ debug_assert_eq!(anchor.buffer_id, self.remote_id);
debug_assert!(
self.version.observed(anchor.timestamp()),
"Anchor timestamp {:?} not observed by buffer {:?}",
@@ -2489,7 +2489,7 @@ impl BufferSnapshot {
#[cold]
fn panic_bad_anchor(&self, anchor: &Anchor) -> ! {
- if anchor.buffer_id.is_some_and(|id| id != self.remote_id) {
+ if anchor.buffer_id != self.remote_id {
panic!(
"invalid anchor - buffer id does not match: anchor {anchor:?}; buffer id: {}, version: {:?}",
self.remote_id, self.version
@@ -2553,12 +2553,12 @@ impl BufferSnapshot {
}
/// Returns an anchor range for the given input position range that is anchored to the text in the range.
- pub fn anchor_range_around<T: ToOffset>(&self, position: Range<T>) -> Range<Anchor> {
+ pub fn anchor_range_inside<T: ToOffset>(&self, position: Range<T>) -> Range<Anchor> {
self.anchor_after(position.start)..self.anchor_before(position.end)
}
/// Returns an anchor range for the given input position range that is anchored to the text before and after.
- pub fn anchor_range_between<T: ToOffset>(&self, position: Range<T>) -> Range<Anchor> {
+ pub fn anchor_range_outside<T: ToOffset>(&self, position: Range<T>) -> Range<Anchor> {
self.anchor_before(position.start)..self.anchor_after(position.end)
}
@@ -2608,7 +2608,7 @@ impl BufferSnapshot {
fragment.timestamp,
fragment.insertion_offset + overshoot as u32,
bias,
- Some(self.remote_id),
+ self.remote_id,
)
}
}
@@ -2616,8 +2616,7 @@ impl BufferSnapshot {
pub fn can_resolve(&self, anchor: &Anchor) -> bool {
anchor.is_min()
|| anchor.is_max()
- || (Some(self.remote_id) == anchor.buffer_id
- && self.version.observed(anchor.timestamp()))
+ || (self.remote_id == anchor.buffer_id && self.version.observed(anchor.timestamp()))
}
pub fn clip_offset(&self, offset: usize, bias: Bias) -> usize {
@@ -2643,7 +2642,10 @@ impl BufferSnapshot {
where
D: TextDimension + Ord,
{
- self.edits_since_in_range(since, Anchor::MIN..Anchor::MAX)
+ self.edits_since_in_range(
+ since,
+ Anchor::min_for_buffer(self.remote_id)..Anchor::max_for_buffer(self.remote_id),
+ )
}
pub fn anchored_edits_since<'a, D>(
@@ -2653,7 +2655,10 @@ impl BufferSnapshot {
where
D: TextDimension + Ord,
{
- self.anchored_edits_since_in_range(since, Anchor::MIN..Anchor::MAX)
+ self.anchored_edits_since_in_range(
+ since,
+ Anchor::min_for_buffer(self.remote_id)..Anchor::max_for_buffer(self.remote_id),
+ )
}
pub fn edits_since_in_range<'a, D>(
@@ -2916,13 +2921,13 @@ impl<D: TextDimension + Ord, F: FnMut(&FragmentSummary) -> bool> Iterator for Ed
fragment.timestamp,
fragment.insertion_offset,
Bias::Right,
- Some(self.buffer_id),
+ self.buffer_id,
);
let end_anchor = Anchor::new(
fragment.timestamp,
fragment.insertion_offset + fragment.len,
Bias::Left,
- Some(self.buffer_id),
+ self.buffer_id,
);
if !fragment.was_visible(self.since, self.undos) && fragment.visible {
@@ -357,7 +357,7 @@ pub(crate) fn zed_default_dark() -> Theme {
("number".into(), orange.into()),
("operator".into(), HighlightStyle::default()),
("predictive".into(), HighlightStyle::default()),
- ("preproc".into(), HighlightStyle::default()),
+ ("preproc".into(), purple.into()),
("primary".into(), HighlightStyle::default()),
("property".into(), red.into()),
("punctuation".into(), HighlightStyle::default()),
@@ -377,6 +377,8 @@ pub(crate) fn zed_default_dark() -> Theme {
("variable".into(), HighlightStyle::default()),
("variable.special".into(), red.into()),
("variant".into(), HighlightStyle::default()),
+ ("diff.plus".into(), green.into()),
+ ("diff.minus".into(), red.into()),
])),
},
}
@@ -740,7 +740,6 @@ impl TitleBar {
.map(|mw| {
mw.read(cx)
.workspaces()
- .iter()
.filter_map(|ws| ws.read(cx).database_id())
.collect()
})
@@ -803,7 +802,6 @@ impl TitleBar {
.map(|mw| {
mw.read(cx)
.workspaces()
- .iter()
.filter_map(|ws| ws.read(cx).database_id())
.collect()
})
@@ -117,7 +117,7 @@ impl ActiveToolchain {
cx: &mut Context<Self>,
) {
let editor = editor.read(cx);
- if let Some((_, buffer, _)) = editor.active_excerpt(cx)
+ if let Some(buffer) = editor.active_buffer(cx)
&& let Some(worktree_id) = buffer.read(cx).file().map(|file| file.worktree_id(cx))
{
let subscription = cx.subscribe_in(
@@ -584,11 +584,11 @@ impl ToolchainSelector {
window: &mut Window,
cx: &mut Context<Workspace>,
) -> Option<()> {
- let (_, buffer, _) = workspace
+ let buffer = workspace
.active_item(cx)?
.act_as::<Editor>(cx)?
.read(cx)
- .active_excerpt(cx)?;
+ .active_buffer(cx)?;
let project = workspace.project().clone();
let language_name = buffer.read(cx).language()?.name();
@@ -29,6 +29,7 @@ mod notification;
mod popover;
mod popover_menu;
mod progress;
+mod redistributable_columns;
mod right_click_menu;
mod scrollbar;
mod stack;
@@ -73,6 +74,7 @@ pub use notification::*;
pub use popover::*;
pub use popover_menu::*;
pub use progress::*;
+pub use redistributable_columns::*;
pub use right_click_menu::*;
pub use scrollbar::*;
pub use stack::*;
@@ -1,7 +1,4 @@
-use crate::{
- CommonAnimationExt, DecoratedIcon, DiffStat, GradientFade, HighlightedLabel, IconDecoration,
- IconDecorationKind, Tooltip, prelude::*,
-};
+use crate::{CommonAnimationExt, DiffStat, GradientFade, HighlightedLabel, Tooltip, prelude::*};
use gpui::{
Animation, AnimationExt, AnyView, ClickEvent, Hsla, MouseButton, SharedString,
@@ -218,7 +215,7 @@ impl RenderOnce for ThreadItem {
let color = cx.theme().colors();
let sidebar_base_bg = color
.title_bar_background
- .blend(color.panel_background.opacity(0.32));
+ .blend(color.panel_background.opacity(0.25));
let raw_bg = self.base_bg.unwrap_or(sidebar_base_bg);
let apparent_bg = color.background.blend(raw_bg);
@@ -266,31 +263,31 @@ impl RenderOnce for ThreadItem {
Icon::new(self.icon).color(icon_color).size(IconSize::Small)
};
- let decoration = |icon: IconDecorationKind, color: Hsla| {
- IconDecoration::new(icon, base_bg, cx)
- .color(color)
- .position(gpui::Point {
- x: px(-2.),
- y: px(-2.),
- })
- };
-
- let (decoration, icon_tooltip) = if self.status == AgentThreadStatus::Error {
+ let (status_icon, icon_tooltip) = if self.status == AgentThreadStatus::Error {
(
- Some(decoration(IconDecorationKind::X, cx.theme().status().error)),
+ Some(
+ Icon::new(IconName::Close)
+ .size(IconSize::Small)
+ .color(Color::Error),
+ ),
Some("Thread has an Error"),
)
} else if self.status == AgentThreadStatus::WaitingForConfirmation {
(
- Some(decoration(
- IconDecorationKind::Triangle,
- cx.theme().status().warning,
- )),
+ Some(
+ Icon::new(IconName::Warning)
+ .size(IconSize::XSmall)
+ .color(Color::Warning),
+ ),
Some("Thread is Waiting for Confirmation"),
)
} else if self.notified {
(
- Some(decoration(IconDecorationKind::Dot, color.text_accent)),
+ Some(
+ Icon::new(IconName::Circle)
+ .size(IconSize::Small)
+ .color(Color::Accent),
+ ),
Some("Thread's Generation is Complete"),
)
} else {
@@ -306,9 +303,9 @@ impl RenderOnce for ThreadItem {
.with_rotate_animation(2),
)
.into_any_element()
- } else if let Some(decoration) = decoration {
+ } else if let Some(status_icon) = status_icon {
icon_container()
- .child(DecoratedIcon::new(agent_icon, Some(decoration)))
+ .child(status_icon)
.when_some(icon_tooltip, |icon, tooltip| {
icon.tooltip(Tooltip::text(tooltip))
})
@@ -551,12 +548,17 @@ impl Component for ThreadItem {
}
fn preview(_window: &mut Window, cx: &mut App) -> Option<AnyElement> {
+ let color = cx.theme().colors();
+ let bg = color
+ .title_bar_background
+ .blend(color.panel_background.opacity(0.25));
+
let container = || {
v_flex()
.w_72()
.border_1()
- .border_color(cx.theme().colors().border_variant)
- .bg(cx.theme().colors().panel_background)
+ .border_color(color.border_variant)
+ .bg(bg)
};
let thread_item_examples = vec![
@@ -570,16 +572,6 @@ impl Component for ThreadItem {
)
.into_any_element(),
),
- single_example(
- "Timestamp Only (hours)",
- container()
- .child(
- ThreadItem::new("ti-1b", "Thread with just a timestamp")
- .icon(IconName::AiClaude)
- .timestamp("3h"),
- )
- .into_any_element(),
- ),
single_example(
"Notified (weeks)",
container()
@@ -1,19 +1,19 @@
use std::{ops::Range, rc::Rc};
use gpui::{
- AbsoluteLength, AppContext as _, DefiniteLength, DragMoveEvent, Entity, EntityId, FocusHandle,
- Length, ListHorizontalSizingBehavior, ListSizingBehavior, ListState, Point, Stateful,
- UniformListScrollHandle, WeakEntity, list, transparent_black, uniform_list,
+ DefiniteLength, Entity, EntityId, FocusHandle, Length, ListHorizontalSizingBehavior,
+ ListSizingBehavior, ListState, Point, Stateful, UniformListScrollHandle, WeakEntity, list,
+ transparent_black, uniform_list,
};
-use itertools::intersperse_with;
use crate::{
ActiveTheme as _, AnyElement, App, Button, ButtonCommon as _, ButtonStyle, Color, Component,
- ComponentScope, Context, Div, ElementId, FixedWidth as _, FluentBuilder as _, Indicator,
- InteractiveElement, IntoElement, ParentElement, Pixels, RegisterComponent, RenderOnce,
- ScrollAxes, ScrollableHandle, Scrollbars, SharedString, StatefulInteractiveElement, Styled,
- StyledExt as _, StyledTypography, Window, WithScrollbar, div, example_group_with_title, h_flex,
- px, single_example,
+ ComponentScope, Context, Div, ElementId, FixedWidth as _, FluentBuilder as _, HeaderResizeInfo,
+ Indicator, InteractiveElement, IntoElement, ParentElement, Pixels, RedistributableColumnsState,
+ RegisterComponent, RenderOnce, ScrollAxes, ScrollableHandle, Scrollbars, SharedString,
+ StatefulInteractiveElement, Styled, StyledExt as _, StyledTypography, Window, WithScrollbar,
+ bind_redistributable_columns, div, example_group_with_title, h_flex, px,
+ render_redistributable_columns_resize_handles, single_example,
table_row::{IntoTableRow as _, TableRow},
v_flex,
};
@@ -22,16 +22,10 @@ pub mod table_row;
#[cfg(test)]
mod tests;
-const RESIZE_COLUMN_WIDTH: f32 = 8.0;
-const RESIZE_DIVIDER_WIDTH: f32 = 1.0;
-
/// Represents an unchecked table row, which is a vector of elements.
/// Will be converted into `TableRow<T>` internally
pub type UncheckedTableRow<T> = Vec<T>;
-#[derive(Debug)]
-pub(crate) struct DraggedColumn(pub(crate) usize);
-
struct UniformListData {
render_list_of_rows_fn:
Box<dyn Fn(Range<usize>, &mut Window, &mut App) -> Vec<UncheckedTableRow<AnyElement>>>,
@@ -113,124 +107,6 @@ impl TableInteractionState {
}
}
-/// Renders invisible resize handles overlaid on top of table content.
-///
-/// - Spacer: invisible element that matches the width of table column content
-/// - Divider: contains the actual resize handle that users can drag to resize columns
-///
-/// Structure: [spacer] [divider] [spacer] [divider] [spacer]
-///
-/// Business logic:
-/// 1. Creates spacers matching each column width
-/// 2. Intersperses (inserts) resize handles between spacers (interactive only for resizable columns)
-/// 3. Each handle supports hover highlighting, double-click to reset, and drag to resize
-/// 4. Returns an absolute-positioned overlay that sits on top of table content
-fn render_resize_handles(
- column_widths: &TableRow<Length>,
- resizable_columns: &TableRow<TableResizeBehavior>,
- initial_sizes: &TableRow<DefiniteLength>,
- columns: Option<Entity<RedistributableColumnsState>>,
- window: &mut Window,
- cx: &mut App,
-) -> AnyElement {
- let spacers = column_widths
- .as_slice()
- .iter()
- .map(|width| base_cell_style(Some(*width)).into_any_element());
-
- let mut column_ix = 0;
- let resizable_columns_shared = Rc::new(resizable_columns.clone());
- let initial_sizes_shared = Rc::new(initial_sizes.clone());
- let mut resizable_columns_iter = resizable_columns.as_slice().iter();
-
- let dividers = intersperse_with(spacers, || {
- let resizable_columns = Rc::clone(&resizable_columns_shared);
- let initial_sizes = Rc::clone(&initial_sizes_shared);
- window.with_id(column_ix, |window| {
- let mut resize_divider = div()
- .id(column_ix)
- .relative()
- .top_0()
- .w(px(RESIZE_DIVIDER_WIDTH))
- .h_full()
- .bg(cx.theme().colors().border.opacity(0.8));
-
- let mut resize_handle = div()
- .id("column-resize-handle")
- .absolute()
- .left_neg_0p5()
- .w(px(RESIZE_COLUMN_WIDTH))
- .h_full();
-
- if resizable_columns_iter
- .next()
- .is_some_and(TableResizeBehavior::is_resizable)
- {
- let hovered = window.use_state(cx, |_window, _cx| false);
-
- resize_divider = resize_divider.when(*hovered.read(cx), |div| {
- div.bg(cx.theme().colors().border_focused)
- });
-
- resize_handle = resize_handle
- .on_hover(move |&was_hovered, _, cx| hovered.write(cx, was_hovered))
- .cursor_col_resize()
- .when_some(columns.clone(), |this, columns| {
- this.on_click(move |event, window, cx| {
- if event.click_count() >= 2 {
- columns.update(cx, |columns, _| {
- columns.on_double_click(
- column_ix,
- &initial_sizes,
- &resizable_columns,
- window,
- );
- })
- }
-
- cx.stop_propagation();
- })
- })
- .on_drag(DraggedColumn(column_ix), |_, _offset, _window, cx| {
- cx.new(|_cx| gpui::Empty)
- })
- }
-
- column_ix += 1;
- resize_divider.child(resize_handle).into_any_element()
- })
- });
-
- h_flex()
- .id("resize-handles")
- .absolute()
- .inset_0()
- .w_full()
- .children(dividers)
- .into_any_element()
-}
-
-#[derive(Debug, Copy, Clone, PartialEq)]
-pub enum TableResizeBehavior {
- None,
- Resizable,
- MinSize(f32),
-}
-
-impl TableResizeBehavior {
- pub fn is_resizable(&self) -> bool {
- *self != TableResizeBehavior::None
- }
-
- pub fn min_size(&self) -> Option<f32> {
- match self {
- TableResizeBehavior::None => None,
- TableResizeBehavior::Resizable => Some(0.05),
- TableResizeBehavior::MinSize(min_size) => Some(*min_size),
- }
- }
-}
-
pub enum ColumnWidthConfig {
/// Static column widths (no resize handles).
Static {
@@ -278,6 +154,21 @@ impl ColumnWidthConfig {
}
}
+ /// Explicit column widths with no fixed table width.
+ pub fn explicit<T: Into<DefiniteLength>>(widths: Vec<T>) -> Self {
+ let cols = widths.len();
+ ColumnWidthConfig::Static {
+ widths: StaticColumnWidths::Explicit(
+ widths
+ .into_iter()
+ .map(Into::into)
+ .collect::<Vec<_>>()
+ .into_table_row(cols),
+ ),
+ table_width: None,
+ }
+ }
+
/// Column widths for rendering.
pub fn widths_to_render(&self, cx: &App) -> Option<TableRow<Length>> {
match self {
@@ -292,10 +183,7 @@ impl ColumnWidthConfig {
ColumnWidthConfig::Redistributable {
columns_state: entity,
..
- } => {
- let state = entity.read(cx);
- Some(state.preview_widths.map_cloned(Length::Definite))
- }
+ } => Some(entity.read(cx).widths_to_render()),
}
}
@@ -316,296 +204,6 @@ impl ColumnWidthConfig {
None => ListHorizontalSizingBehavior::FitList,
}
}
-
- /// Render resize handles overlay if applicable.
- pub fn render_resize_handles(&self, window: &mut Window, cx: &mut App) -> Option<AnyElement> {
- match self {
- ColumnWidthConfig::Redistributable {
- columns_state: entity,
- ..
- } => {
- let (column_widths, resize_behavior, initial_widths) = {
- let state = entity.read(cx);
- (
- state.preview_widths.map_cloned(Length::Definite),
- state.resize_behavior.clone(),
- state.initial_widths.clone(),
- )
- };
- Some(render_resize_handles(
- &column_widths,
- &resize_behavior,
- &initial_widths,
- Some(entity.clone()),
- window,
- cx,
- ))
- }
- _ => None,
- }
- }
-
- /// Returns info needed for header double-click-to-reset, if applicable.
- pub fn header_resize_info(&self, cx: &App) -> Option<HeaderResizeInfo> {
- match self {
- ColumnWidthConfig::Redistributable { columns_state, .. } => {
- let state = columns_state.read(cx);
- Some(HeaderResizeInfo {
- columns_state: columns_state.downgrade(),
- resize_behavior: state.resize_behavior.clone(),
- initial_widths: state.initial_widths.clone(),
- })
- }
- _ => None,
- }
- }
-}
-
-#[derive(Clone)]
-pub struct HeaderResizeInfo {
- pub columns_state: WeakEntity<RedistributableColumnsState>,
- pub resize_behavior: TableRow<TableResizeBehavior>,
- pub initial_widths: TableRow<DefiniteLength>,
-}
-
-pub struct RedistributableColumnsState {
- pub(crate) initial_widths: TableRow<DefiniteLength>,
- pub(crate) committed_widths: TableRow<DefiniteLength>,
- pub(crate) preview_widths: TableRow<DefiniteLength>,
- pub(crate) resize_behavior: TableRow<TableResizeBehavior>,
- pub(crate) cached_table_width: Pixels,
-}
-
-impl RedistributableColumnsState {
- pub fn new(
- cols: usize,
- initial_widths: UncheckedTableRow<impl Into<DefiniteLength>>,
- resize_behavior: UncheckedTableRow<TableResizeBehavior>,
- ) -> Self {
- let widths: TableRow<DefiniteLength> = initial_widths
- .into_iter()
- .map(Into::into)
- .collect::<Vec<_>>()
- .into_table_row(cols);
- Self {
- initial_widths: widths.clone(),
- committed_widths: widths.clone(),
- preview_widths: widths,
- resize_behavior: resize_behavior.into_table_row(cols),
- cached_table_width: Default::default(),
- }
- }
-
- pub fn cols(&self) -> usize {
- self.committed_widths.cols()
- }
-
- pub fn initial_widths(&self) -> &TableRow<DefiniteLength> {
- &self.initial_widths
- }
-
- pub fn resize_behavior(&self) -> &TableRow<TableResizeBehavior> {
- &self.resize_behavior
- }
-
- fn get_fraction(length: &DefiniteLength, bounds_width: Pixels, rem_size: Pixels) -> f32 {
- match length {
- DefiniteLength::Absolute(AbsoluteLength::Pixels(pixels)) => *pixels / bounds_width,
- DefiniteLength::Absolute(AbsoluteLength::Rems(rems_width)) => {
- rems_width.to_pixels(rem_size) / bounds_width
- }
- DefiniteLength::Fraction(fraction) => *fraction,
- }
- }
-
- pub(crate) fn on_double_click(
- &mut self,
- double_click_position: usize,
- initial_sizes: &TableRow<DefiniteLength>,
- resize_behavior: &TableRow<TableResizeBehavior>,
- window: &mut Window,
- ) {
- let bounds_width = self.cached_table_width;
- let rem_size = window.rem_size();
- let initial_sizes =
- initial_sizes.map_ref(|length| Self::get_fraction(length, bounds_width, rem_size));
- let widths = self
- .committed_widths
- .map_ref(|length| Self::get_fraction(length, bounds_width, rem_size));
-
- let updated_widths = Self::reset_to_initial_size(
- double_click_position,
- widths,
- initial_sizes,
- resize_behavior,
- );
- self.committed_widths = updated_widths.map(DefiniteLength::Fraction);
- self.preview_widths = self.committed_widths.clone();
- }
-
- pub(crate) fn reset_to_initial_size(
- col_idx: usize,
- mut widths: TableRow<f32>,
- initial_sizes: TableRow<f32>,
- resize_behavior: &TableRow<TableResizeBehavior>,
- ) -> TableRow<f32> {
- let diff = initial_sizes[col_idx] - widths[col_idx];
-
- let left_diff =
- initial_sizes[..col_idx].iter().sum::<f32>() - widths[..col_idx].iter().sum::<f32>();
- let right_diff = initial_sizes[col_idx + 1..].iter().sum::<f32>()
- - widths[col_idx + 1..].iter().sum::<f32>();
-
- let go_left_first = if diff < 0.0 {
- left_diff > right_diff
- } else {
- left_diff < right_diff
- };
-
- if !go_left_first {
- let diff_remaining =
- Self::propagate_resize_diff(diff, col_idx, &mut widths, resize_behavior, 1);
-
- if diff_remaining != 0.0 && col_idx > 0 {
- Self::propagate_resize_diff(
- diff_remaining,
- col_idx,
- &mut widths,
- resize_behavior,
- -1,
- );
- }
- } else {
- let diff_remaining =
- Self::propagate_resize_diff(diff, col_idx, &mut widths, resize_behavior, -1);
-
- if diff_remaining != 0.0 {
- Self::propagate_resize_diff(
- diff_remaining,
- col_idx,
- &mut widths,
- resize_behavior,
- 1,
- );
- }
- }
-
- widths
- }
-
- pub(crate) fn on_drag_move(
- &mut self,
- drag_event: &DragMoveEvent<DraggedColumn>,
- window: &mut Window,
- cx: &mut Context<Self>,
- ) {
- let drag_position = drag_event.event.position;
- let bounds = drag_event.bounds;
-
- let mut col_position = 0.0;
- let rem_size = window.rem_size();
- let bounds_width = bounds.right() - bounds.left();
- let col_idx = drag_event.drag(cx).0;
-
- let divider_width = Self::get_fraction(
- &DefiniteLength::Absolute(AbsoluteLength::Pixels(px(RESIZE_DIVIDER_WIDTH))),
- bounds_width,
- rem_size,
- );
-
- let mut widths = self
- .committed_widths
- .map_ref(|length| Self::get_fraction(length, bounds_width, rem_size));
-
- for length in widths[0..=col_idx].iter() {
- col_position += length + divider_width;
- }
-
- let mut total_length_ratio = col_position;
- for length in widths[col_idx + 1..].iter() {
- total_length_ratio += length;
- }
- let cols = self.resize_behavior.cols();
- total_length_ratio += (cols - 1 - col_idx) as f32 * divider_width;
-
- let drag_fraction = (drag_position.x - bounds.left()) / bounds_width;
- let drag_fraction = drag_fraction * total_length_ratio;
- let diff = drag_fraction - col_position - divider_width / 2.0;
-
- Self::drag_column_handle(diff, col_idx, &mut widths, &self.resize_behavior);
-
- self.preview_widths = widths.map(DefiniteLength::Fraction);
- }
-
- pub(crate) fn drag_column_handle(
- diff: f32,
- col_idx: usize,
- widths: &mut TableRow<f32>,
- resize_behavior: &TableRow<TableResizeBehavior>,
- ) {
- if diff > 0.0 {
- Self::propagate_resize_diff(diff, col_idx, widths, resize_behavior, 1);
- } else {
- Self::propagate_resize_diff(-diff, col_idx + 1, widths, resize_behavior, -1);
- }
- }
-
- pub(crate) fn propagate_resize_diff(
- diff: f32,
- col_idx: usize,
- widths: &mut TableRow<f32>,
- resize_behavior: &TableRow<TableResizeBehavior>,
- direction: i8,
- ) -> f32 {
- let mut diff_remaining = diff;
- if resize_behavior[col_idx].min_size().is_none() {
- return diff;
- }
-
- let step_right;
- let step_left;
- if direction < 0 {
- step_right = 0;
- step_left = 1;
- } else {
- step_right = 1;
- step_left = 0;
- }
- if col_idx == 0 && direction < 0 {
- return diff;
- }
- let mut curr_column = col_idx + step_right - step_left;
-
- while diff_remaining != 0.0 && curr_column < widths.cols() {
- let Some(min_size) = resize_behavior[curr_column].min_size() else {
- if curr_column == 0 {
- break;
- }
- curr_column -= step_left;
- curr_column += step_right;
- continue;
- };
-
- let curr_width = widths[curr_column] - diff_remaining;
- widths[curr_column] = curr_width;
-
- if min_size > curr_width {
- diff_remaining = min_size - curr_width;
- widths[curr_column] = min_size;
- } else {
- diff_remaining = 0.0;
- break;
- }
- if curr_column == 0 {
- break;
- }
- curr_column -= step_left;
- curr_column += step_right;
- }
- widths[col_idx] = widths[col_idx] + (diff - diff_remaining);
-
- diff_remaining
- }
}
/// A table component
@@ -919,11 +517,8 @@ pub fn render_table_header(
if event.click_count() > 1 {
info.columns_state
.update(cx, |column, _| {
- column.on_double_click(
- header_idx,
- &info.initial_widths,
- &info.resize_behavior,
- window,
+ column.reset_column_to_initial_width(
+ header_idx, window,
);
})
.ok();
@@ -962,6 +557,19 @@ impl TableRenderContext {
disable_base_cell_style: table.disable_base_cell_style,
}
}
+
+ pub fn for_column_widths(column_widths: Option<TableRow<Length>>, use_ui_font: bool) -> Self {
+ Self {
+ striped: false,
+ show_row_borders: true,
+ show_row_hover: true,
+ total_row_count: 0,
+ column_widths,
+ map_row: None,
+ use_ui_font,
+ disable_base_cell_style: false,
+ }
+ }
}
impl RenderOnce for Table {
@@ -969,9 +577,15 @@ impl RenderOnce for Table {
let table_context = TableRenderContext::new(&self, cx);
let interaction_state = self.interaction_state.and_then(|state| state.upgrade());
- let header_resize_info = interaction_state
- .as_ref()
- .and_then(|_| self.column_width_config.header_resize_info(cx));
+ let header_resize_info =
+ interaction_state
+ .as_ref()
+ .and_then(|_| match &self.column_width_config {
+ ColumnWidthConfig::Redistributable { columns_state, .. } => {
+ Some(HeaderResizeInfo::from_state(columns_state, cx))
+ }
+ _ => None,
+ });
let table_width = self.column_width_config.table_width();
let horizontal_sizing = self.column_width_config.list_horizontal_sizing();
@@ -985,13 +599,19 @@ impl RenderOnce for Table {
ColumnWidthConfig::Redistributable {
columns_state: entity,
..
- } => Some(entity.downgrade()),
+ } => Some(entity.clone()),
_ => None,
});
- let resize_handles = interaction_state
- .as_ref()
- .and_then(|_| self.column_width_config.render_resize_handles(window, cx));
+ let resize_handles =
+ interaction_state
+ .as_ref()
+ .and_then(|_| match &self.column_width_config {
+ ColumnWidthConfig::Redistributable { columns_state, .. } => Some(
+ render_redistributable_columns_resize_handles(columns_state, window, cx),
+ ),
+ _ => None,
+ });
let table = div()
.when_some(table_width, |this, width| this.w(width))
@@ -1006,38 +626,8 @@ impl RenderOnce for Table {
cx,
))
})
- .when_some(redistributable_entity, {
- |this, widths| {
- this.on_drag_move::<DraggedColumn>({
- let widths = widths.clone();
- move |e, window, cx| {
- widths
- .update(cx, |widths, cx| {
- widths.on_drag_move(e, window, cx);
- })
- .ok();
- }
- })
- .on_children_prepainted({
- let widths = widths.clone();
- move |bounds, _, cx| {
- widths
- .update(cx, |widths, _| {
- // This works because all children x axis bounds are the same
- widths.cached_table_width =
- bounds[0].right() - bounds[0].left();
- })
- .ok();
- }
- })
- .on_drop::<DraggedColumn>(move |_, _, cx| {
- widths
- .update(cx, |widths, _| {
- widths.committed_widths = widths.preview_widths.clone();
- })
- .ok();
- })
- }
+ .when_some(redistributable_entity, |this, widths| {
+ bind_redistributable_columns(this, widths)
})
.child({
let content = div()
@@ -1,4 +1,5 @@
-use super::*;
+use super::table_row::TableRow;
+use crate::{RedistributableColumnsState, TableResizeBehavior};
fn is_almost_eq(a: &[f32], b: &[f32]) -> bool {
a.len() == b.len() && a.iter().zip(b).all(|(x, y)| (x - y).abs() < 1e-6)
@@ -0,0 +1,485 @@
+use std::rc::Rc;
+
+use gpui::{
+ AbsoluteLength, AppContext as _, Bounds, DefiniteLength, DragMoveEvent, Empty, Entity, Length,
+ WeakEntity,
+};
+use itertools::intersperse_with;
+
+use super::data_table::table_row::{IntoTableRow as _, TableRow};
+use crate::{
+ ActiveTheme as _, AnyElement, App, Context, Div, FluentBuilder as _, InteractiveElement,
+ IntoElement, ParentElement, Pixels, StatefulInteractiveElement, Styled, Window, div, h_flex,
+ px,
+};
+
+const RESIZE_COLUMN_WIDTH: f32 = 8.0;
+const RESIZE_DIVIDER_WIDTH: f32 = 1.0;
+
+#[derive(Debug)]
+struct DraggedColumn(usize);
+
+#[derive(Debug, Copy, Clone, PartialEq)]
+pub enum TableResizeBehavior {
+ None,
+ Resizable,
+ MinSize(f32),
+}
+
+impl TableResizeBehavior {
+ pub fn is_resizable(&self) -> bool {
+ *self != TableResizeBehavior::None
+ }
+
+ pub fn min_size(&self) -> Option<f32> {
+ match self {
+ TableResizeBehavior::None => None,
+ TableResizeBehavior::Resizable => Some(0.05),
+ TableResizeBehavior::MinSize(min_size) => Some(*min_size),
+ }
+ }
+}
+
+#[derive(Clone)]
+pub struct HeaderResizeInfo {
+ pub columns_state: WeakEntity<RedistributableColumnsState>,
+ pub resize_behavior: TableRow<TableResizeBehavior>,
+}
+
+impl HeaderResizeInfo {
+ pub fn from_state(columns_state: &Entity<RedistributableColumnsState>, cx: &App) -> Self {
+ let resize_behavior = columns_state.read(cx).resize_behavior().clone();
+ Self {
+ columns_state: columns_state.downgrade(),
+ resize_behavior,
+ }
+ }
+}
+
+pub struct RedistributableColumnsState {
+ pub(crate) initial_widths: TableRow<DefiniteLength>,
+ pub(crate) committed_widths: TableRow<DefiniteLength>,
+ pub(crate) preview_widths: TableRow<DefiniteLength>,
+ pub(crate) resize_behavior: TableRow<TableResizeBehavior>,
+ pub(crate) cached_container_width: Pixels,
+}
+
+impl RedistributableColumnsState {
+ pub fn new(
+ cols: usize,
+ initial_widths: Vec<impl Into<DefiniteLength>>,
+ resize_behavior: Vec<TableResizeBehavior>,
+ ) -> Self {
+ let widths: TableRow<DefiniteLength> = initial_widths
+ .into_iter()
+ .map(Into::into)
+ .collect::<Vec<_>>()
+ .into_table_row(cols);
+ Self {
+ initial_widths: widths.clone(),
+ committed_widths: widths.clone(),
+ preview_widths: widths,
+ resize_behavior: resize_behavior.into_table_row(cols),
+ cached_container_width: Default::default(),
+ }
+ }
+
+ pub fn cols(&self) -> usize {
+ self.committed_widths.cols()
+ }
+
+ pub fn initial_widths(&self) -> &TableRow<DefiniteLength> {
+ &self.initial_widths
+ }
+
+ pub fn preview_widths(&self) -> &TableRow<DefiniteLength> {
+ &self.preview_widths
+ }
+
+ pub fn resize_behavior(&self) -> &TableRow<TableResizeBehavior> {
+ &self.resize_behavior
+ }
+
+ pub fn widths_to_render(&self) -> TableRow<Length> {
+ self.preview_widths.map_cloned(Length::Definite)
+ }
+
+ pub fn preview_fractions(&self, rem_size: Pixels) -> TableRow<f32> {
+ if self.cached_container_width > px(0.) {
+ self.preview_widths
+ .map_ref(|length| Self::get_fraction(length, self.cached_container_width, rem_size))
+ } else {
+ self.preview_widths.map_ref(|length| match length {
+ DefiniteLength::Fraction(fraction) => *fraction,
+ DefiniteLength::Absolute(_) => 0.0,
+ })
+ }
+ }
+
+ pub fn preview_column_width(&self, column_index: usize, window: &Window) -> Option<Pixels> {
+ let width = self.preview_widths().as_slice().get(column_index)?;
+ match width {
+ DefiniteLength::Fraction(fraction) if self.cached_container_width > px(0.) => {
+ Some(self.cached_container_width * *fraction)
+ }
+ DefiniteLength::Fraction(_) => None,
+ DefiniteLength::Absolute(AbsoluteLength::Pixels(pixels)) => Some(*pixels),
+ DefiniteLength::Absolute(AbsoluteLength::Rems(rems_width)) => {
+ Some(rems_width.to_pixels(window.rem_size()))
+ }
+ }
+ }
+
+ pub fn cached_container_width(&self) -> Pixels {
+ self.cached_container_width
+ }
+
+ pub fn set_cached_container_width(&mut self, width: Pixels) {
+ self.cached_container_width = width;
+ }
+
+ pub fn commit_preview(&mut self) {
+ self.committed_widths = self.preview_widths.clone();
+ }
+
+ pub fn reset_column_to_initial_width(&mut self, column_index: usize, window: &Window) {
+ let bounds_width = self.cached_container_width;
+ if bounds_width <= px(0.) {
+ return;
+ }
+
+ let rem_size = window.rem_size();
+ let initial_sizes = self
+ .initial_widths
+ .map_ref(|length| Self::get_fraction(length, bounds_width, rem_size));
+ let widths = self
+ .committed_widths
+ .map_ref(|length| Self::get_fraction(length, bounds_width, rem_size));
+
+ let updated_widths =
+ Self::reset_to_initial_size(column_index, widths, initial_sizes, &self.resize_behavior);
+ self.committed_widths = updated_widths.map(DefiniteLength::Fraction);
+ self.preview_widths = self.committed_widths.clone();
+ }
+
+ fn get_fraction(length: &DefiniteLength, bounds_width: Pixels, rem_size: Pixels) -> f32 {
+ match length {
+ DefiniteLength::Absolute(AbsoluteLength::Pixels(pixels)) => *pixels / bounds_width,
+ DefiniteLength::Absolute(AbsoluteLength::Rems(rems_width)) => {
+ rems_width.to_pixels(rem_size) / bounds_width
+ }
+ DefiniteLength::Fraction(fraction) => *fraction,
+ }
+ }
+
+ pub(crate) fn reset_to_initial_size(
+ col_idx: usize,
+ mut widths: TableRow<f32>,
+ initial_sizes: TableRow<f32>,
+ resize_behavior: &TableRow<TableResizeBehavior>,
+ ) -> TableRow<f32> {
+ let diff = initial_sizes[col_idx] - widths[col_idx];
+
+ let left_diff =
+ initial_sizes[..col_idx].iter().sum::<f32>() - widths[..col_idx].iter().sum::<f32>();
+ let right_diff = initial_sizes[col_idx + 1..].iter().sum::<f32>()
+ - widths[col_idx + 1..].iter().sum::<f32>();
+
+ let go_left_first = if diff < 0.0 {
+ left_diff > right_diff
+ } else {
+ left_diff < right_diff
+ };
+
+ if !go_left_first {
+ let diff_remaining =
+ Self::propagate_resize_diff(diff, col_idx, &mut widths, resize_behavior, 1);
+
+ if diff_remaining != 0.0 && col_idx > 0 {
+ Self::propagate_resize_diff(
+ diff_remaining,
+ col_idx,
+ &mut widths,
+ resize_behavior,
+ -1,
+ );
+ }
+ } else {
+ let diff_remaining =
+ Self::propagate_resize_diff(diff, col_idx, &mut widths, resize_behavior, -1);
+
+ if diff_remaining != 0.0 {
+ Self::propagate_resize_diff(
+ diff_remaining,
+ col_idx,
+ &mut widths,
+ resize_behavior,
+ 1,
+ );
+ }
+ }
+
+ widths
+ }
+
+ fn on_drag_move(
+ &mut self,
+ drag_event: &DragMoveEvent<DraggedColumn>,
+ window: &mut Window,
+ cx: &mut Context<Self>,
+ ) {
+ let drag_position = drag_event.event.position;
+ let bounds = drag_event.bounds;
+ let bounds_width = bounds.right() - bounds.left();
+ if bounds_width <= px(0.) {
+ return;
+ }
+
+ let mut col_position = 0.0;
+ let rem_size = window.rem_size();
+ let col_idx = drag_event.drag(cx).0;
+
+ let divider_width = Self::get_fraction(
+ &DefiniteLength::Absolute(AbsoluteLength::Pixels(px(RESIZE_DIVIDER_WIDTH))),
+ bounds_width,
+ rem_size,
+ );
+
+ let mut widths = self
+ .committed_widths
+ .map_ref(|length| Self::get_fraction(length, bounds_width, rem_size));
+
+ for length in widths[0..=col_idx].iter() {
+ col_position += length + divider_width;
+ }
+
+ let mut total_length_ratio = col_position;
+ for length in widths[col_idx + 1..].iter() {
+ total_length_ratio += length;
+ }
+ let cols = self.resize_behavior.cols();
+ total_length_ratio += (cols - 1 - col_idx) as f32 * divider_width;
+
+ let drag_fraction = (drag_position.x - bounds.left()) / bounds_width;
+ let drag_fraction = drag_fraction * total_length_ratio;
+ let diff = drag_fraction - col_position - divider_width / 2.0;
+
+ Self::drag_column_handle(diff, col_idx, &mut widths, &self.resize_behavior);
+
+ self.preview_widths = widths.map(DefiniteLength::Fraction);
+ }
+
+ pub(crate) fn drag_column_handle(
+ diff: f32,
+ col_idx: usize,
+ widths: &mut TableRow<f32>,
+ resize_behavior: &TableRow<TableResizeBehavior>,
+ ) {
+ if diff > 0.0 {
+ Self::propagate_resize_diff(diff, col_idx, widths, resize_behavior, 1);
+ } else {
+ Self::propagate_resize_diff(-diff, col_idx + 1, widths, resize_behavior, -1);
+ }
+ }
+
+ pub(crate) fn propagate_resize_diff(
+ diff: f32,
+ col_idx: usize,
+ widths: &mut TableRow<f32>,
+ resize_behavior: &TableRow<TableResizeBehavior>,
+ direction: i8,
+ ) -> f32 {
+ let mut diff_remaining = diff;
+ if resize_behavior[col_idx].min_size().is_none() {
+ return diff;
+ }
+
+ let step_right;
+ let step_left;
+ if direction < 0 {
+ step_right = 0;
+ step_left = 1;
+ } else {
+ step_right = 1;
+ step_left = 0;
+ }
+ if col_idx == 0 && direction < 0 {
+ return diff;
+ }
+ let mut curr_column = col_idx + step_right - step_left;
+
+ while diff_remaining != 0.0 && curr_column < widths.cols() {
+ let Some(min_size) = resize_behavior[curr_column].min_size() else {
+ if curr_column == 0 {
+ break;
+ }
+ curr_column -= step_left;
+ curr_column += step_right;
+ continue;
+ };
+
+ let curr_width = widths[curr_column] - diff_remaining;
+ widths[curr_column] = curr_width;
+
+ if min_size > curr_width {
+ diff_remaining = min_size - curr_width;
+ widths[curr_column] = min_size;
+ } else {
+ diff_remaining = 0.0;
+ break;
+ }
+ if curr_column == 0 {
+ break;
+ }
+ curr_column -= step_left;
+ curr_column += step_right;
+ }
+ widths[col_idx] = widths[col_idx] + (diff - diff_remaining);
+
+ diff_remaining
+ }
+}
+
+pub fn bind_redistributable_columns(
+ container: Div,
+ columns_state: Entity<RedistributableColumnsState>,
+) -> Div {
+ container
+ .on_drag_move::<DraggedColumn>({
+ let columns_state = columns_state.clone();
+ move |event, window, cx| {
+ columns_state.update(cx, |columns, cx| {
+ columns.on_drag_move(event, window, cx);
+ });
+ }
+ })
+ .on_children_prepainted({
+ let columns_state = columns_state.clone();
+ move |bounds, _, cx| {
+ if let Some(width) = child_bounds_width(&bounds) {
+ columns_state.update(cx, |columns, _| {
+ columns.set_cached_container_width(width);
+ });
+ }
+ }
+ })
+ .on_drop::<DraggedColumn>(move |_, _, cx| {
+ columns_state.update(cx, |columns, _| {
+ columns.commit_preview();
+ });
+ })
+}
+
+pub fn render_redistributable_columns_resize_handles(
+ columns_state: &Entity<RedistributableColumnsState>,
+ window: &mut Window,
+ cx: &mut App,
+) -> AnyElement {
+ let (column_widths, resize_behavior) = {
+ let state = columns_state.read(cx);
+ (state.widths_to_render(), state.resize_behavior().clone())
+ };
+
+ let mut column_ix = 0;
+ let resize_behavior = Rc::new(resize_behavior);
+ let dividers = intersperse_with(
+ column_widths
+ .as_slice()
+ .iter()
+ .copied()
+ .map(|width| resize_spacer(width).into_any_element()),
+ || {
+ let current_column_ix = column_ix;
+ let resize_behavior = Rc::clone(&resize_behavior);
+ let columns_state = columns_state.clone();
+ column_ix += 1;
+
+ window.with_id(current_column_ix, |window| {
+ let mut resize_divider = div()
+ .id(current_column_ix)
+ .relative()
+ .top_0()
+ .w(px(RESIZE_DIVIDER_WIDTH))
+ .h_full()
+ .bg(cx.theme().colors().border.opacity(0.8));
+
+ let mut resize_handle = div()
+ .id("column-resize-handle")
+ .absolute()
+ .left_neg_0p5()
+ .w(px(RESIZE_COLUMN_WIDTH))
+ .h_full();
+
+ if resize_behavior[current_column_ix].is_resizable() {
+ let is_highlighted = window.use_state(cx, |_window, _cx| false);
+
+ resize_divider = resize_divider.when(*is_highlighted.read(cx), |div| {
+ div.bg(cx.theme().colors().border_focused)
+ });
+
+ resize_handle = resize_handle
+ .on_hover({
+ let is_highlighted = is_highlighted.clone();
+ move |&was_hovered, _, cx| is_highlighted.write(cx, was_hovered)
+ })
+ .cursor_col_resize()
+ .on_click({
+ let columns_state = columns_state.clone();
+ move |event, window, cx| {
+ if event.click_count() >= 2 {
+ columns_state.update(cx, |columns, _| {
+ columns.reset_column_to_initial_width(
+ current_column_ix,
+ window,
+ );
+ });
+ }
+
+ cx.stop_propagation();
+ }
+ })
+ .on_drag(DraggedColumn(current_column_ix), {
+ let is_highlighted = is_highlighted.clone();
+ move |_, _offset, _window, cx| {
+ is_highlighted.write(cx, true);
+ cx.new(|_cx| Empty)
+ }
+ })
+ .on_drop::<DraggedColumn>(move |_, _, cx| {
+ is_highlighted.write(cx, false);
+ columns_state.update(cx, |state, _| {
+ state.commit_preview();
+ });
+ });
+ }
+
+ resize_divider.child(resize_handle).into_any_element()
+ })
+ },
+ );
+
+ h_flex()
+ .id("resize-handles")
+ .absolute()
+ .inset_0()
+ .w_full()
+ .children(dividers)
+ .into_any_element()
+}
+
+fn resize_spacer(width: Length) -> Div {
+ div().w(width).h_full()
+}
+
+fn child_bounds_width(bounds: &[Bounds<Pixels>]) -> Option<Pixels> {
+ let first_bounds = bounds.first()?;
+ let mut left = first_bounds.left();
+ let mut right = first_bounds.right();
+
+ for bound in bounds.iter().skip(1) {
+ left = left.min(bound.left());
+ right = right.max(bound.right());
+ }
+
+ Some(right - left)
+}
@@ -38,7 +38,7 @@ impl Hash for PathList {
}
}
-#[derive(Debug, Serialize, Deserialize)]
+#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct SerializedPathList {
pub paths: String,
pub order: String,
@@ -65,6 +65,16 @@ impl PathList {
self.paths.is_empty()
}
+ /// Returns a new `PathList` with the given path removed.
+ pub fn without_path(&self, path_to_remove: &Path) -> PathList {
+ let paths: Vec<PathBuf> = self
+ .ordered_paths()
+ .filter(|p| p.as_path() != path_to_remove)
+ .cloned()
+ .collect();
+ PathList::new(&paths)
+ }
+
/// Get the paths in lexicographic order.
pub fn paths(&self) -> &[PathBuf] {
self.paths.as_ref()
@@ -2,9 +2,21 @@ use std::path::Path;
use anyhow::{Context as _, Result};
use collections::HashMap;
+use serde::Deserialize;
use crate::shell::ShellKind;
+fn parse_env_map_from_noisy_output(output: &str) -> Result<collections::HashMap<String, String>> {
+ for (position, _) in output.match_indices('{') {
+ let candidate = &output[position..];
+ let mut deserializer = serde_json::Deserializer::from_str(candidate);
+ if let Ok(env_map) = HashMap::<String, String>::deserialize(&mut deserializer) {
+ return Ok(env_map);
+ }
+ }
+ anyhow::bail!("Failed to find JSON in shell output: {output}")
+}
+
pub fn print_env() {
let env_vars: HashMap<String, String> = std::env::vars().collect();
let json = serde_json::to_string_pretty(&env_vars).unwrap_or_else(|err| {
@@ -109,10 +121,9 @@ async fn capture_unix(
);
// Parse the JSON output from zed --printenv
- let env_map: collections::HashMap<String, String> = serde_json::from_str(&env_output)
- .with_context(|| {
- format!("Failed to deserialize environment variables from json: {env_output}")
- })?;
+ let env_map = parse_env_map_from_noisy_output(&env_output).with_context(|| {
+ format!("Failed to deserialize environment variables from json: {env_output}")
+ })?;
Ok(env_map)
}
@@ -213,14 +224,10 @@ async fn capture_windows(
&format!("cd {}; {} --printenv", quoted_directory, zed_command),
])
}
- ShellKind::Cmd => cmd.args([
- "/c",
- "cd",
- &directory_string,
- "&&",
- &zed_path_string,
- "--printenv",
- ]),
+ ShellKind::Cmd => {
+ let dir = directory_string.trim_end_matches('\\');
+ cmd.args(["/d", "/c", "cd", dir, "&&", &zed_path_string, "--printenv"])
+ }
}
.stdin(Stdio::null())
.stdout(Stdio::piped())
@@ -238,8 +245,7 @@ async fn capture_windows(
);
let env_output = String::from_utf8_lossy(&output.stdout);
- // Parse the JSON output from zed --printenv
- serde_json::from_str(&env_output).with_context(|| {
+ parse_env_map_from_noisy_output(&env_output).with_context(|| {
format!("Failed to deserialize environment variables from json: {env_output}")
})
}
@@ -1348,7 +1348,7 @@ impl Position {
let snapshot = editor.snapshot(window, cx);
let target = match self {
Position::Line { row, offset } => {
- if let Some(anchor) = editor.active_excerpt(cx).and_then(|(_, buffer, _)| {
+ if let Some(anchor) = editor.active_buffer(cx).and_then(|buffer| {
editor.buffer().read(cx).buffer_point_to_anchor(
&buffer,
Point::new(row.saturating_sub(1), 0),
@@ -2336,7 +2336,7 @@ impl Vim {
match c {
'%' => {
self.update_editor(cx, |_, editor, cx| {
- if let Some((_, buffer, _)) = editor.active_excerpt(cx)
+ if let Some(buffer) = editor.active_buffer(cx)
&& let Some(file) = buffer.read(cx).file()
&& let Some(local) = file.as_local()
{
@@ -12,7 +12,6 @@ use editor::{
};
use gpui::actions;
use gpui::{Context, Window};
-use itertools::Itertools as _;
use language::{CharClassifier, CharKind, Point};
use search::{BufferSearchBar, SearchOptions};
use settings::Settings;
@@ -941,19 +940,15 @@ impl Vim {
editor.change_selections(SelectionEffects::default(), window, cx, |s| {
let buffer = snapshot.buffer_snapshot();
- s.select_anchor_ranges(
+ s.select_ranges(
prior_selections
.iter()
.cloned()
.chain(s.all_anchors(&snapshot).iter().map(|s| s.range()))
- .sorted_by(|a, b| {
- a.start
- .cmp(&b.start, buffer)
- .then_with(|| a.end.cmp(&b.end, buffer))
- })
- .dedup_by(|a, b| {
- a.start.cmp(&b.start, buffer).is_eq()
- && a.end.cmp(&b.end, buffer).is_eq()
+ .map(|range| {
+ let start = range.start.to_offset(buffer);
+ let end = range.end.to_offset(buffer);
+ start..end
}),
);
})
@@ -2152,6 +2147,93 @@ mod test {
cx.assert_state("hello two «oneˇ» two «oneˇ» two «oneˇ»", Mode::HelixSelect);
}
+ #[gpui::test]
+ async fn test_helix_select_next_match_wrapping_from_normal(cx: &mut gpui::TestAppContext) {
+ let mut cx = VimTestContext::new(cx, true).await;
+ cx.enable_helix();
+
+ // Exact repro for #51573: start in HelixNormal, search, then `v` to
+ // enter HelixSelect, then `n` past last match.
+ //
+ // In HelixNormal, search collapses the cursor to the match start.
+ // Pressing `v` expands by only one character, creating a partial
+ // selection that overlaps the full match range when the search wraps.
+ // The overlapping ranges must be merged (not just deduped) to avoid
+ // a backward-seeking rope cursor panic.
+ cx.set_state(
+ indoc! {"
+ searˇch term
+ stuff
+ search term
+ other stuff
+ "},
+ Mode::HelixNormal,
+ );
+ cx.simulate_keystrokes("/ t e r m");
+ cx.simulate_keystrokes("enter");
+ cx.simulate_keystrokes("v");
+ cx.simulate_keystrokes("n");
+ cx.simulate_keystrokes("n");
+ // Should not panic when wrapping past last match.
+ cx.assert_state(
+ indoc! {"
+ search «termˇ»
+ stuff
+ search «termˇ»
+ other stuff
+ "},
+ Mode::HelixSelect,
+ );
+ }
+
+ #[gpui::test]
+ async fn test_helix_select_star_then_match(cx: &mut gpui::TestAppContext) {
+ let mut cx = VimTestContext::new(cx, true).await;
+ cx.enable_helix();
+
+ // Repro attempts for #52852: `*` searches for word under cursor,
+ // `v` enters select, `n` accumulates matches, `m` triggers match mode.
+ // Try multiple cursor positions and match counts.
+
+ // Cursor on first occurrence, 3 more occurrences to select through
+ cx.set_state(
+ indoc! {"
+ ˇone two one three one four one
+ "},
+ Mode::HelixNormal,
+ );
+ cx.simulate_keystrokes("*");
+ cx.simulate_keystrokes("v");
+ cx.simulate_keystrokes("n n n");
+ // Should not panic on wrapping `n`.
+
+ // Cursor in the middle of text before matches
+ cx.set_state(
+ indoc! {"
+ heˇllo one two one three one
+ "},
+ Mode::HelixNormal,
+ );
+ cx.simulate_keystrokes("*");
+ cx.simulate_keystrokes("v");
+ cx.simulate_keystrokes("n");
+ // Should not panic.
+
+ // The original #52852 sequence: * v n n n then m m
+ cx.set_state(
+ indoc! {"
+ fn ˇfoo() { bar(foo()) }
+ fn baz() { foo() }
+ "},
+ Mode::HelixNormal,
+ );
+ cx.simulate_keystrokes("*");
+ cx.simulate_keystrokes("v");
+ cx.simulate_keystrokes("n n n");
+ cx.simulate_keystrokes("m m");
+ // Should not panic.
+ }
+
#[gpui::test]
async fn test_helix_substitute(cx: &mut gpui::TestAppContext) {
let mut cx = VimTestContext::new(cx, true).await;
@@ -1,16 +1,18 @@
use editor::{
Anchor, Bias, BufferOffset, DisplayPoint, Editor, MultiBufferOffset, RowExt, ToOffset,
+ ToPoint as _,
display_map::{DisplayRow, DisplaySnapshot, FoldPoint, ToDisplayPoint},
movement::{
self, FindRange, TextLayoutDetails, find_boundary, find_preceding_boundary_display_point,
},
};
use gpui::{Action, Context, Window, actions, px};
-use language::{CharKind, Point, Selection, SelectionGoal};
+use language::{CharKind, Point, Selection, SelectionGoal, TextObject, TreeSitterOptions};
use multi_buffer::MultiBufferRow;
use schemars::JsonSchema;
use serde::Deserialize;
use std::{f64, ops::Range};
+
use workspace::searchable::Direction;
use crate::{
@@ -2340,39 +2342,19 @@ fn start_of_next_sentence(
fn go_to_line(map: &DisplaySnapshot, display_point: DisplayPoint, line: usize) -> DisplayPoint {
let point = map.display_point_to_point(display_point, Bias::Left);
- let Some(mut excerpt) = map.buffer_snapshot().excerpt_containing(point..point) else {
+ let snapshot = map.buffer_snapshot();
+ let Some((buffer_snapshot, _)) = snapshot.point_to_buffer_point(point) else {
+ return display_point;
+ };
+
+ let Some(anchor) = snapshot.anchor_in_excerpt(buffer_snapshot.anchor_after(
+ buffer_snapshot.clip_point(Point::new((line - 1) as u32, point.column), Bias::Left),
+ )) else {
return display_point;
};
- let offset = excerpt.buffer().point_to_offset(
- excerpt
- .buffer()
- .clip_point(Point::new((line - 1) as u32, point.column), Bias::Left),
- );
- let buffer_range = excerpt.buffer_range();
- if offset >= buffer_range.start.0 && offset <= buffer_range.end.0 {
- let point = map
- .buffer_snapshot()
- .offset_to_point(excerpt.map_offset_from_buffer(BufferOffset(offset)));
- return map.clip_point(map.point_to_display_point(point, Bias::Left), Bias::Left);
- }
- for (excerpt, buffer, range) in map.buffer_snapshot().excerpts() {
- let excerpt_range = language::ToOffset::to_offset(&range.context.start, buffer)
- ..language::ToOffset::to_offset(&range.context.end, buffer);
- if offset >= excerpt_range.start && offset <= excerpt_range.end {
- let text_anchor = buffer.anchor_after(offset);
- let anchor = Anchor::in_buffer(excerpt, text_anchor);
- return anchor.to_display_point(map);
- } else if offset <= excerpt_range.start {
- let anchor = Anchor::in_buffer(excerpt, range.context.start);
- return anchor.to_display_point(map);
- }
- }
map.clip_point(
- map.point_to_display_point(
- map.buffer_snapshot().clip_point(point, Bias::Left),
- Bias::Left,
- ),
+ map.point_to_display_point(anchor.to_point(snapshot), Bias::Left),
Bias::Left,
)
}
@@ -2469,6 +2451,10 @@ fn find_matching_bracket_text_based(
.take_while(|(_, char_offset)| *char_offset < line_range.end)
.find_map(|(ch, char_offset)| get_bracket_pair(ch).map(|info| (info, char_offset)));
+ if bracket_info.is_none() {
+ return find_matching_c_preprocessor_directive(map, line_range);
+ }
+
let (open, close, is_opening) = bracket_info?.0;
let bracket_offset = bracket_info?.1;
@@ -2500,6 +2486,122 @@ fn find_matching_bracket_text_based(
None
}
+fn find_matching_c_preprocessor_directive(
+ map: &DisplaySnapshot,
+ line_range: Range<MultiBufferOffset>,
+) -> Option<MultiBufferOffset> {
+ let line_start = map
+ .buffer_chars_at(line_range.start)
+ .skip_while(|(c, _)| *c == ' ' || *c == '\t')
+ .map(|(c, _)| c)
+ .take(6)
+ .collect::<String>();
+
+ if line_start.starts_with("#if")
+ || line_start.starts_with("#else")
+ || line_start.starts_with("#elif")
+ {
+ let mut depth = 0i32;
+ for (ch, char_offset) in map.buffer_chars_at(line_range.end) {
+ if ch != '\n' {
+ continue;
+ }
+ let mut line_offset = char_offset + '\n'.len_utf8();
+
+ // Skip leading whitespace
+ map.buffer_chars_at(line_offset)
+ .take_while(|(c, _)| *c == ' ' || *c == '\t')
+ .for_each(|(_, _)| line_offset += 1);
+
+ // Check what directive starts the next line
+ let next_line_start = map
+ .buffer_chars_at(line_offset)
+ .map(|(c, _)| c)
+ .take(6)
+ .collect::<String>();
+
+ if next_line_start.starts_with("#if") {
+ depth += 1;
+ } else if next_line_start.starts_with("#endif") {
+ if depth > 0 {
+ depth -= 1;
+ } else {
+ return Some(line_offset);
+ }
+ } else if next_line_start.starts_with("#else") || next_line_start.starts_with("#elif") {
+ if depth == 0 {
+ return Some(line_offset);
+ }
+ }
+ }
+ } else if line_start.starts_with("#endif") {
+ let mut depth = 0i32;
+ for (ch, char_offset) in
+ map.reverse_buffer_chars_at(line_range.start.saturating_sub_usize(1))
+ {
+ let mut line_offset = if char_offset == MultiBufferOffset(0) {
+ MultiBufferOffset(0)
+ } else if ch != '\n' {
+ continue;
+ } else {
+ char_offset + '\n'.len_utf8()
+ };
+
+ // Skip leading whitespace
+ map.buffer_chars_at(line_offset)
+ .take_while(|(c, _)| *c == ' ' || *c == '\t')
+ .for_each(|(_, _)| line_offset += 1);
+
+ // Check what directive starts this line
+ let line_start = map
+ .buffer_chars_at(line_offset)
+ .skip_while(|(c, _)| *c == ' ' || *c == '\t')
+ .map(|(c, _)| c)
+ .take(6)
+ .collect::<String>();
+
+ if line_start.starts_with("\n\n") {
+ // empty line
+ continue;
+ } else if line_start.starts_with("#endif") {
+ depth += 1;
+ } else if line_start.starts_with("#if") {
+ if depth > 0 {
+ depth -= 1;
+ } else {
+ return Some(line_offset);
+ }
+ }
+ }
+ }
+ None
+}
+
+fn comment_delimiter_pair(
+ map: &DisplaySnapshot,
+ offset: MultiBufferOffset,
+) -> Option<(Range<MultiBufferOffset>, Range<MultiBufferOffset>)> {
+ let snapshot = map.buffer_snapshot();
+ snapshot
+ .text_object_ranges(offset..offset, TreeSitterOptions::default())
+ .find_map(|(range, obj)| {
+ if !matches!(obj, TextObject::InsideComment | TextObject::AroundComment)
+ || !range.contains(&offset)
+ {
+ return None;
+ }
+
+ let mut chars = snapshot.chars_at(range.start);
+ if (Some('/'), Some('*')) != (chars.next(), chars.next()) {
+ return None;
+ }
+
+ let open_range = range.start..range.start + 2usize;
+ let close_range = range.end - 2..range.end;
+ Some((open_range, close_range))
+ })
+}
+
fn matching(
map: &DisplaySnapshot,
display_point: DisplayPoint,
@@ -2627,6 +2729,32 @@ fn matching(
continue;
}
+ if let Some((open_range, close_range)) = comment_delimiter_pair(map, offset) {
+ if open_range.contains(&offset) {
+ return close_range.start.to_display_point(map);
+ }
+
+ if close_range.contains(&offset) {
+ return open_range.start.to_display_point(map);
+ }
+
+ let open_candidate = (open_range.start >= offset
+ && line_range.contains(&open_range.start))
+ .then_some((open_range.start.saturating_sub(offset), close_range.start));
+
+ let close_candidate = (close_range.start >= offset
+ && line_range.contains(&close_range.start))
+ .then_some((close_range.start.saturating_sub(offset), open_range.start));
+
+ if let Some((_, destination)) = [open_candidate, close_candidate]
+ .into_iter()
+ .flatten()
+ .min_by_key(|(distance, _)| *distance)
+ {
+ return destination.to_display_point(map);
+ }
+ }
+
closest_pair_destination
.map(|destination| destination.to_display_point(map))
.unwrap_or_else(|| {
@@ -3515,6 +3643,119 @@ mod test {
);
}
+ #[gpui::test]
+ async fn test_matching_comments(cx: &mut gpui::TestAppContext) {
+ let mut cx = NeovimBackedTestContext::new(cx).await;
+
+ cx.set_shared_state(indoc! {r"ˇ/*
+ this is a comment
+ */"})
+ .await;
+ cx.simulate_shared_keystrokes("%").await;
+ cx.shared_state().await.assert_eq(indoc! {r"/*
+ this is a comment
+ ˇ*/"});
+ cx.simulate_shared_keystrokes("%").await;
+ cx.shared_state().await.assert_eq(indoc! {r"ˇ/*
+ this is a comment
+ */"});
+ cx.simulate_shared_keystrokes("%").await;
+ cx.shared_state().await.assert_eq(indoc! {r"/*
+ this is a comment
+ ˇ*/"});
+
+ cx.set_shared_state("ˇ// comment").await;
+ cx.simulate_shared_keystrokes("%").await;
+ cx.shared_state().await.assert_eq("ˇ// comment");
+ }
+
+ #[gpui::test]
+ async fn test_matching_preprocessor_directives(cx: &mut gpui::TestAppContext) {
+ let mut cx = NeovimBackedTestContext::new(cx).await;
+
+ cx.set_shared_state(indoc! {r"#ˇif
+
+ #else
+
+ #endif
+ "})
+ .await;
+ cx.simulate_shared_keystrokes("%").await;
+ cx.shared_state().await.assert_eq(indoc! {r"#if
+
+ ˇ#else
+
+ #endif
+ "});
+
+ cx.simulate_shared_keystrokes("%").await;
+ cx.shared_state().await.assert_eq(indoc! {r"#if
+
+ #else
+
+ ˇ#endif
+ "});
+
+ cx.simulate_shared_keystrokes("%").await;
+ cx.shared_state().await.assert_eq(indoc! {r"ˇ#if
+
+ #else
+
+ #endif
+ "});
+
+ cx.set_shared_state(indoc! {r"
+ #ˇif
+ #if
+
+ #else
+
+ #endif
+
+ #else
+ #endif
+ "})
+ .await;
+
+ cx.simulate_shared_keystrokes("%").await;
+ cx.shared_state().await.assert_eq(indoc! {r"
+ #if
+ #if
+
+ #else
+
+ #endif
+
+ ˇ#else
+ #endif
+ "});
+
+ cx.simulate_shared_keystrokes("% %").await;
+ cx.shared_state().await.assert_eq(indoc! {r"
+ ˇ#if
+ #if
+
+ #else
+
+ #endif
+
+ #else
+ #endif
+ "});
+ cx.simulate_shared_keystrokes("j % % %").await;
+ cx.shared_state().await.assert_eq(indoc! {r"
+ #if
+ ˇ#if
+
+ #else
+
+ #endif
+
+ #else
+ #endif
+ "});
+ }
+
#[gpui::test]
async fn test_unmatched_forward(cx: &mut gpui::TestAppContext) {
let mut cx = NeovimBackedTestContext::new(cx).await;
@@ -932,7 +932,7 @@ impl Vim {
Vim::take_forced_motion(cx);
self.update_editor(cx, |vim, editor, cx| {
let selection = editor.selections.newest_anchor();
- let Some((buffer, point, _)) = editor
+ let Some((buffer, point)) = editor
.buffer()
.read(cx)
.point_to_buffer_point(selection.head(), cx)
@@ -245,7 +245,7 @@ impl Vim {
search_bar.set_replacement(None, cx);
let mut options = SearchOptions::NONE;
- if action.regex {
+ if action.regex && VimSettings::get_global(cx).use_regex_search {
options |= SearchOptions::REGEX;
}
if action.backwards {
@@ -1446,4 +1446,66 @@ mod test {
// The cursor should be at the match location on line 3 (row 2).
cx.assert_state("hello world\nfoo bar\nhello ˇagain\n", Mode::Normal);
}
+
+ #[gpui::test]
+ async fn test_vim_search_respects_search_settings(cx: &mut gpui::TestAppContext) {
+ let mut cx = VimTestContext::new(cx, true).await;
+
+ cx.update_global(|store: &mut SettingsStore, cx| {
+ store.update_user_settings(cx, |settings| {
+ settings.vim.get_or_insert_default().use_regex_search = Some(false);
+ });
+ });
+
+ cx.set_state("ˇcontent", Mode::Normal);
+ cx.simulate_keystrokes("/");
+ cx.run_until_parked();
+
+ // Verify search options are set from settings
+ let search_bar = cx.workspace(|workspace, _, cx| {
+ workspace
+ .active_pane()
+ .read(cx)
+ .toolbar()
+ .read(cx)
+ .item_of_type::<BufferSearchBar>()
+ .expect("Buffer search bar should be active")
+ });
+
+ cx.update_entity(search_bar, |bar, _window, _cx| {
+ assert!(
+ !bar.has_search_option(search::SearchOptions::REGEX),
+ "Vim search open without regex mode"
+ );
+ });
+
+ cx.simulate_keystrokes("escape");
+ cx.run_until_parked();
+
+ cx.update_global(|store: &mut SettingsStore, cx| {
+ store.update_user_settings(cx, |settings| {
+ settings.vim.get_or_insert_default().use_regex_search = Some(true);
+ });
+ });
+
+ cx.simulate_keystrokes("/");
+ cx.run_until_parked();
+
+ let search_bar = cx.workspace(|workspace, _, cx| {
+ workspace
+ .active_pane()
+ .read(cx)
+ .toolbar()
+ .read(cx)
+ .item_of_type::<BufferSearchBar>()
+ .expect("Buffer search bar should be active")
+ });
+
+ cx.update_entity(search_bar, |bar, _window, _cx| {
+ assert!(
+ bar.has_search_option(search::SearchOptions::REGEX),
+ "Vim search opens with regex mode"
+ );
+ });
+ }
}
@@ -203,33 +203,24 @@ fn find_mini_delimiters(
is_valid_delimiter: &DelimiterPredicate,
) -> Option<Range<DisplayPoint>> {
let point = map.clip_at_line_end(display_point).to_point(map);
- let offset = point.to_offset(&map.buffer_snapshot());
+ let offset = map.buffer_snapshot().point_to_offset(point);
let line_range = get_line_range(map, point);
let visible_line_range = get_visible_line_range(&line_range);
let snapshot = &map.buffer_snapshot();
- let mut excerpt = snapshot.excerpt_containing(offset..offset)?;
- let buffer = excerpt.buffer();
- let buffer_offset = excerpt.map_offset_to_buffer(offset);
- let bracket_filter = |open: Range<usize>, close: Range<usize>| {
- is_valid_delimiter(buffer, open.start, close.start)
- };
-
- // Try to find delimiters in visible range first
let ranges = map
.buffer_snapshot()
.bracket_ranges(visible_line_range)
.map(|ranges| {
ranges.filter_map(|(open, close)| {
- // Convert the ranges from multibuffer space to buffer space as
- // that is what `is_valid_delimiter` expects, otherwise it might
- // panic as the values might be out of bounds.
- let buffer_open = excerpt.map_range_to_buffer(open.clone());
- let buffer_close = excerpt.map_range_to_buffer(close.clone());
+ let (buffer, buffer_open) =
+ snapshot.range_to_buffer_range::<MultiBufferOffset>(open.clone())?;
+ let (_, buffer_close) =
+ snapshot.range_to_buffer_range::<MultiBufferOffset>(close.clone())?;
- if is_valid_delimiter(buffer, buffer_open.start.0, buffer_close.start.0) {
+ if is_valid_delimiter(buffer, buffer_open.start, buffer_close.start) {
Some((open, close))
} else {
None
@@ -247,18 +238,31 @@ fn find_mini_delimiters(
);
}
- // Fall back to innermost enclosing brackets
- let (open_bracket, close_bracket) = buffer
- .innermost_enclosing_bracket_ranges(buffer_offset..buffer_offset, Some(&bracket_filter))?;
+ let results = snapshot.map_excerpt_ranges(offset..offset, |buffer, _, input_range| {
+ let buffer_offset = input_range.start.0;
+ let bracket_filter = |open: Range<usize>, close: Range<usize>| {
+ is_valid_delimiter(buffer, open.start, close.start)
+ };
+ let Some((open, close)) = buffer.innermost_enclosing_bracket_ranges(
+ buffer_offset..buffer_offset,
+ Some(&bracket_filter),
+ ) else {
+ return vec![];
+ };
+ vec![
+ (BufferOffset(open.start)..BufferOffset(open.end), ()),
+ (BufferOffset(close.start)..BufferOffset(close.end), ()),
+ ]
+ })?;
+
+ if results.len() < 2 {
+ return None;
+ }
Some(
DelimiterRange {
- open: excerpt.map_range_from_buffer(
- BufferOffset(open_bracket.start)..BufferOffset(open_bracket.end),
- ),
- close: excerpt.map_range_from_buffer(
- BufferOffset(close_bracket.start)..BufferOffset(close_bracket.end),
- ),
+ open: results[0].0.clone(),
+ close: results[1].0.clone(),
}
.to_display_range(map, around),
)
@@ -935,61 +939,64 @@ pub fn surrounding_html_tag(
}
let snapshot = &map.buffer_snapshot();
- let offset = head.to_offset(map, Bias::Left);
- let mut excerpt = snapshot.excerpt_containing(offset..offset)?;
- let buffer = excerpt.buffer();
- let offset = excerpt.map_offset_to_buffer(offset);
-
- // Find the most closest to current offset
- let mut cursor = buffer.syntax_layer_at(offset)?.node().walk();
- let mut last_child_node = cursor.node();
- while cursor.goto_first_child_for_byte(offset.0).is_some() {
- last_child_node = cursor.node();
- }
-
- let mut last_child_node = Some(last_child_node);
- while let Some(cur_node) = last_child_node {
- if cur_node.child_count() >= 2 {
- let first_child = cur_node.child(0);
- let last_child = cur_node.child(cur_node.child_count() as u32 - 1);
- if let (Some(first_child), Some(last_child)) = (first_child, last_child) {
- let open_tag = open_tag(buffer.chars_for_range(first_child.byte_range()));
- let close_tag = close_tag(buffer.chars_for_range(last_child.byte_range()));
- // It needs to be handled differently according to the selection length
- let is_valid = if range.end.to_offset(map, Bias::Left)
- - range.start.to_offset(map, Bias::Left)
- <= 1
- {
- offset.0 <= last_child.end_byte()
- } else {
- excerpt
- .map_offset_to_buffer(range.start.to_offset(map, Bias::Left))
- .0
- >= first_child.start_byte()
- && excerpt
- .map_offset_to_buffer(range.end.to_offset(map, Bias::Left))
- .0
- <= last_child.start_byte() + 1
- };
- if open_tag.is_some() && open_tag == close_tag && is_valid {
- let range = if around {
- first_child.byte_range().start..last_child.byte_range().end
- } else {
- first_child.byte_range().end..last_child.byte_range().start
- };
- let range = BufferOffset(range.start)..BufferOffset(range.end);
- if excerpt.contains_buffer_range(range.clone()) {
- let result = excerpt.map_range_from_buffer(range);
- return Some(
- result.start.to_display_point(map)..result.end.to_display_point(map),
- );
+ let head_offset = head.to_offset(map, Bias::Left);
+ let range_start = range.start.to_offset(map, Bias::Left);
+ let range_end = range.end.to_offset(map, Bias::Left);
+ let head_is_start = head_offset <= range_start;
+
+ let results = snapshot.map_excerpt_ranges(
+ range_start..range_end,
+ |buffer, _excerpt_range, input_buffer_range| {
+ let buffer_offset = if head_is_start {
+ input_buffer_range.start
+ } else {
+ input_buffer_range.end
+ };
+
+ let Some(layer) = buffer.syntax_layer_at(buffer_offset) else {
+ return Vec::new();
+ };
+ let mut cursor = layer.node().walk();
+ let mut last_child_node = cursor.node();
+ while cursor.goto_first_child_for_byte(buffer_offset.0).is_some() {
+ last_child_node = cursor.node();
+ }
+
+ let mut last_child_node = Some(last_child_node);
+ while let Some(cur_node) = last_child_node {
+ if cur_node.child_count() >= 2 {
+ let first_child = cur_node.child(0);
+ let last_child = cur_node.child(cur_node.child_count() as u32 - 1);
+ if let (Some(first_child), Some(last_child)) = (first_child, last_child) {
+ let open_tag = open_tag(buffer.chars_for_range(first_child.byte_range()));
+ let close_tag = close_tag(buffer.chars_for_range(last_child.byte_range()));
+ let is_valid = if range_end.saturating_sub(range_start) <= 1 {
+ buffer_offset.0 <= last_child.end_byte()
+ } else {
+ input_buffer_range.start.0 >= first_child.start_byte()
+ && input_buffer_range.end.0 <= last_child.start_byte() + 1
+ };
+ if open_tag.is_some() && open_tag == close_tag && is_valid {
+ let buffer_range = if around {
+ first_child.byte_range().start..last_child.byte_range().end
+ } else {
+ first_child.byte_range().end..last_child.byte_range().start
+ };
+ return vec![(
+ BufferOffset(buffer_range.start)..BufferOffset(buffer_range.end),
+ (),
+ )];
+ }
}
}
+ last_child_node = cur_node.parent();
}
- }
- last_child_node = cur_node.parent();
- }
- None
+ Vec::new()
+ },
+ )?;
+
+ let (result, ()) = results.into_iter().next()?;
+ Some(result.start.to_display_point(map)..result.end.to_display_point(map))
}
/// Returns a range that surrounds the word and following whitespace
@@ -1163,44 +1170,55 @@ fn text_object(
let snapshot = &map.buffer_snapshot();
let offset = relative_to.to_offset(map, Bias::Left);
- let mut excerpt = snapshot.excerpt_containing(offset..offset)?;
- let buffer = excerpt.buffer();
- let offset = excerpt.map_offset_to_buffer(offset);
-
- let mut matches: Vec<Range<usize>> = buffer
- .text_object_ranges(offset..offset, TreeSitterOptions::default())
- .filter_map(|(r, m)| if m == target { Some(r) } else { None })
- .collect();
- matches.sort_by_key(|r| r.end - r.start);
- if let Some(buffer_range) = matches.first() {
- let buffer_range = BufferOffset(buffer_range.start)..BufferOffset(buffer_range.end);
- let range = excerpt.map_range_from_buffer(buffer_range);
- return Some(range.start.to_display_point(map)..range.end.to_display_point(map));
- }
-
- let around = target.around()?;
- let mut matches: Vec<Range<usize>> = buffer
- .text_object_ranges(offset..offset, TreeSitterOptions::default())
- .filter_map(|(r, m)| if m == around { Some(r) } else { None })
- .collect();
- matches.sort_by_key(|r| r.end - r.start);
- let around_range = matches.first()?;
-
- let mut matches: Vec<Range<usize>> = buffer
- .text_object_ranges(around_range.clone(), TreeSitterOptions::default())
- .filter_map(|(r, m)| if m == target { Some(r) } else { None })
- .collect();
- matches.sort_by_key(|r| r.start);
- if let Some(buffer_range) = matches.first()
- && !buffer_range.is_empty()
- {
- let buffer_range = BufferOffset(buffer_range.start)..BufferOffset(buffer_range.end);
- let range = excerpt.map_range_from_buffer(buffer_range);
- return Some(range.start.to_display_point(map)..range.end.to_display_point(map));
- }
- let around_range = BufferOffset(around_range.start)..BufferOffset(around_range.end);
- let buffer_range = excerpt.map_range_from_buffer(around_range);
- return Some(buffer_range.start.to_display_point(map)..buffer_range.end.to_display_point(map));
+ let results =
+ snapshot.map_excerpt_ranges(offset..offset, |buffer, _excerpt_range, buffer_range| {
+ let buffer_offset = buffer_range.start;
+
+ let mut matches: Vec<Range<usize>> = buffer
+ .text_object_ranges(buffer_offset..buffer_offset, TreeSitterOptions::default())
+ .filter_map(|(r, m)| if m == target { Some(r) } else { None })
+ .collect();
+ matches.sort_by_key(|r| r.end - r.start);
+ if let Some(buffer_range) = matches.first() {
+ return vec![(
+ BufferOffset(buffer_range.start)..BufferOffset(buffer_range.end),
+ (),
+ )];
+ }
+
+ let Some(around) = target.around() else {
+ return vec![];
+ };
+ let mut matches: Vec<Range<usize>> = buffer
+ .text_object_ranges(buffer_offset..buffer_offset, TreeSitterOptions::default())
+ .filter_map(|(r, m)| if m == around { Some(r) } else { None })
+ .collect();
+ matches.sort_by_key(|r| r.end - r.start);
+ let Some(around_range) = matches.first() else {
+ return vec![];
+ };
+
+ let mut matches: Vec<Range<usize>> = buffer
+ .text_object_ranges(around_range.clone(), TreeSitterOptions::default())
+ .filter_map(|(r, m)| if m == target { Some(r) } else { None })
+ .collect();
+ matches.sort_by_key(|r| r.start);
+ if let Some(buffer_range) = matches.first()
+ && !buffer_range.is_empty()
+ {
+ return vec![(
+ BufferOffset(buffer_range.start)..BufferOffset(buffer_range.end),
+ (),
+ )];
+ }
+ vec![(
+ BufferOffset(around_range.start)..BufferOffset(around_range.end),
+ (),
+ )]
+ })?;
+
+ let (range, ()) = results.into_iter().next()?;
+ Some(range.start.to_display_point(map)..range.end.to_display_point(map))
}
fn argument(
@@ -1211,16 +1229,11 @@ fn argument(
let snapshot = &map.buffer_snapshot();
let offset = relative_to.to_offset(map, Bias::Left);
- // The `argument` vim text object uses the syntax tree, so we operate at the buffer level and map back to the display level
- let mut excerpt = snapshot.excerpt_containing(offset..offset)?;
- let buffer = excerpt.buffer();
-
fn comma_delimited_range_at(
buffer: &BufferSnapshot,
mut offset: BufferOffset,
include_comma: bool,
) -> Option<Range<BufferOffset>> {
- // Seek to the first non-whitespace character
offset += buffer
.chars_at(offset)
.take_while(|c| c.is_whitespace())
@@ -1228,25 +1241,20 @@ fn argument(
.sum::<usize>();
let bracket_filter = |open: Range<usize>, close: Range<usize>| {
- // Filter out empty ranges
if open.end == close.start {
return false;
}
- // If the cursor is outside the brackets, ignore them
if open.start == offset.0 || close.end == offset.0 {
return false;
}
- // TODO: Is there any better way to filter out string brackets?
- // Used to filter out string brackets
matches!(
buffer.chars_at(open.start).next(),
Some('(' | '[' | '{' | '<' | '|')
)
};
- // Find the brackets containing the cursor
let (open_bracket, close_bracket) =
buffer.innermost_enclosing_bracket_ranges(offset..offset, Some(&bracket_filter))?;
@@ -1256,7 +1264,6 @@ fn argument(
let node = layer.node();
let mut cursor = node.walk();
- // Loop until we find the smallest node whose parent covers the bracket range. This node is the argument in the parent argument list
let mut parent_covers_bracket_range = false;
loop {
let node = cursor.node();
@@ -1268,20 +1275,17 @@ fn argument(
}
parent_covers_bracket_range = covers_bracket_range;
- // Unable to find a child node with a parent that covers the bracket range, so no argument to select
cursor.goto_first_child_for_byte(offset.0)?;
}
let mut argument_node = cursor.node();
- // If the child node is the open bracket, move to the next sibling.
if argument_node.byte_range() == open_bracket {
if !cursor.goto_next_sibling() {
return Some(inner_bracket_range);
}
argument_node = cursor.node();
}
- // While the child node is the close bracket or a comma, move to the previous sibling
while argument_node.byte_range() == close_bracket || argument_node.kind() == "," {
if !cursor.goto_previous_sibling() {
return Some(inner_bracket_range);
@@ -1292,14 +1296,11 @@ fn argument(
}
}
- // The start and end of the argument range, defaulting to the start and end of the argument node
let mut start = argument_node.start_byte();
let mut end = argument_node.end_byte();
let mut needs_surrounding_comma = include_comma;
- // Seek backwards to find the start of the argument - either the previous comma or the opening bracket.
- // We do this because multiple nodes can represent a single argument, such as with rust `vec![a.b.c, d.e.f]`
while cursor.goto_previous_sibling() {
let prev = cursor.node();
@@ -1317,7 +1318,6 @@ fn argument(
}
}
- // Do the same for the end of the argument, extending to next comma or the end of the argument list
while cursor.goto_next_sibling() {
let next = cursor.node();
@@ -1326,7 +1326,6 @@ fn argument(
break;
} else if next.kind() == "," {
if needs_surrounding_comma {
- // Select up to the beginning of the next argument if there is one, otherwise to the end of the comma
if let Some(next_arg) = next.next_sibling() {
end = next_arg.start_byte();
} else {
@@ -1342,14 +1341,17 @@ fn argument(
Some(BufferOffset(start)..BufferOffset(end))
}
- let result = comma_delimited_range_at(buffer, excerpt.map_offset_to_buffer(offset), around)?;
+ let results =
+ snapshot.map_excerpt_ranges(offset..offset, |buffer, _excerpt_range, buffer_range| {
+ let buffer_offset = buffer_range.start;
+ match comma_delimited_range_at(buffer, buffer_offset, around) {
+ Some(result) => vec![(result, ())],
+ None => vec![],
+ }
+ })?;
- if excerpt.contains_buffer_range(result.clone()) {
- let result = excerpt.map_range_from_buffer(result);
- Some(result.start.to_display_point(map)..result.end.to_display_point(map))
- } else {
- None
- }
+ let (range, ()) = results.into_iter().next()?;
+ Some(range.start.to_display_point(map)..range.end.to_display_point(map))
}
fn indent(
@@ -3369,7 +3371,12 @@ mod test {
// but, since this is being set manually, the language isn't
// automatically set.
let editor = Editor::new(EditorMode::full(), multi_buffer.clone(), None, window, cx);
- let buffer_ids = multi_buffer.read(cx).excerpt_buffer_ids();
+ let buffer_ids = multi_buffer
+ .read(cx)
+ .snapshot(cx)
+ .excerpts()
+ .map(|excerpt| excerpt.context.start.buffer_id)
+ .collect::<Vec<_>>();
if let Some(buffer) = multi_buffer.read(cx).buffer(buffer_ids[1]) {
buffer.update(cx, |buffer, cx| {
buffer.set_language(Some(language::rust_lang()), cx);
@@ -426,7 +426,7 @@ impl MarksState {
name.clone(),
buffer
.read(cx)
- .summaries_for_anchors::<Point, _>(anchors)
+ .summaries_for_anchors::<Point, _>(anchors.iter().copied())
.collect(),
)
})
@@ -492,7 +492,14 @@ impl MarksState {
{
let buffer_marks = old_marks
.into_iter()
- .map(|(k, v)| (k, v.into_iter().map(|anchor| anchor.text_anchor).collect()))
+ .map(|(k, v)| {
+ (
+ k,
+ v.into_iter()
+ .filter_map(|anchor| anchor.raw_text_anchor())
+ .collect(),
+ )
+ })
.collect();
self.buffer_marks
.insert(buffer.read(cx).remote_id(), buffer_marks);
@@ -569,6 +576,7 @@ impl MarksState {
anchors: Vec<Anchor>,
cx: &mut Context<Self>,
) {
+ let multibuffer_snapshot = multibuffer.read(cx).snapshot(cx);
let buffer = multibuffer.read(cx).as_singleton();
let abs_path = buffer.as_ref().and_then(|b| self.path_for_buffer(b, cx));
@@ -602,7 +610,7 @@ impl MarksState {
name.clone(),
anchors
.into_iter()
- .map(|anchor| anchor.text_anchor)
+ .filter_map(|anchor| Some(multibuffer_snapshot.anchor_to_buffer_anchor(anchor)?.0))
.collect(),
);
if !self.watched_buffers.contains_key(&buffer_id) {
@@ -629,12 +637,13 @@ impl MarksState {
return Some(Mark::Local(anchors.get(name)?.clone()));
}
- let (excerpt_id, buffer_id, _) = multi_buffer.read(cx).read(cx).as_singleton()?;
- if let Some(anchors) = self.buffer_marks.get(&buffer_id) {
+ let multibuffer_snapshot = multi_buffer.read(cx).snapshot(cx);
+ let buffer_snapshot = multibuffer_snapshot.as_singleton()?;
+ if let Some(anchors) = self.buffer_marks.get(&buffer_snapshot.remote_id()) {
let text_anchors = anchors.get(name)?;
let anchors = text_anchors
.iter()
- .map(|anchor| Anchor::in_buffer(excerpt_id, *anchor))
+ .filter_map(|anchor| multibuffer_snapshot.anchor_in_excerpt(*anchor))
.collect();
return Some(Mark::Local(anchors));
}
@@ -895,14 +904,13 @@ impl VimGlobals {
}
}
'%' => editor.and_then(|editor| {
- let selection = editor
- .selections
- .newest::<Point>(&editor.display_snapshot(cx));
- if let Some((_, buffer, _)) = editor
- .buffer()
- .read(cx)
- .excerpt_containing(selection.head(), cx)
- {
+ let multibuffer = editor.buffer().read(cx);
+ let snapshot = multibuffer.snapshot(cx);
+ let selection = editor.selections.newest_anchor();
+ let buffer = snapshot
+ .anchor_to_buffer_anchor(selection.head())
+ .and_then(|(text_anchor, _)| multibuffer.buffer(text_anchor.buffer_id));
+ if let Some(buffer) = buffer {
buffer
.read(cx)
.file()
@@ -2117,7 +2117,12 @@ async fn test_folded_multibuffer_excerpts(cx: &mut gpui::TestAppContext) {
);
let mut editor = Editor::new(EditorMode::full(), multi_buffer.clone(), None, window, cx);
- let buffer_ids = multi_buffer.read(cx).excerpt_buffer_ids();
+ let buffer_ids = multi_buffer
+ .read(cx)
+ .snapshot(cx)
+ .excerpts()
+ .map(|excerpt| excerpt.context.start.buffer_id)
+ .collect::<Vec<_>>();
// fold all but the second buffer, so that we test navigating between two
// adjacent folded buffers, as well as folded buffers at the start and
// end the multibuffer
@@ -2262,7 +2267,13 @@ async fn test_folded_multibuffer_excerpts(cx: &mut gpui::TestAppContext) {
"
});
cx.update_editor(|editor, _, cx| {
- let buffer_ids = editor.buffer().read(cx).excerpt_buffer_ids();
+ let buffer_ids = editor
+ .buffer()
+ .read(cx)
+ .snapshot(cx)
+ .excerpts()
+ .map(|excerpt| excerpt.context.start.buffer_id)
+ .collect::<Vec<_>>();
editor.fold_buffer(buffer_ids[1], cx);
});
@@ -109,12 +109,12 @@ impl VimTestContext {
}
cx.bind_keys(default_key_bindings);
if enabled {
- let vim_key_bindings = settings::KeymapFile::load_asset(
- "keymaps/vim.json",
- Some(settings::KeybindSource::Vim),
- cx,
- )
- .unwrap();
+ let mut vim_key_bindings =
+ settings::KeymapFile::load_asset_allow_partial_failure("keymaps/vim.json", cx)
+ .unwrap();
+ for key_binding in &mut vim_key_bindings {
+ key_binding.set_meta(settings::KeybindSource::Vim.meta());
+ }
cx.bind_keys(vim_key_bindings);
}
}
@@ -2141,6 +2141,7 @@ struct VimSettings {
pub toggle_relative_line_numbers: bool,
pub use_system_clipboard: settings::UseSystemClipboard,
pub use_smartcase_find: bool,
+ pub use_regex_search: bool,
pub gdefault: bool,
pub custom_digraphs: HashMap<String, Arc<str>>,
pub highlight_on_yank_duration: u64,
@@ -2227,6 +2228,7 @@ impl Settings for VimSettings {
toggle_relative_line_numbers: vim.toggle_relative_line_numbers.unwrap(),
use_system_clipboard: vim.use_system_clipboard.unwrap(),
use_smartcase_find: vim.use_smartcase_find.unwrap(),
+ use_regex_search: vim.use_regex_search.unwrap(),
gdefault: vim.gdefault.unwrap(),
custom_digraphs: vim.custom_digraphs.unwrap(),
highlight_on_yank_duration: vim.highlight_on_yank_duration.unwrap(),
@@ -0,0 +1,10 @@
+{"Put":{"state":"ˇ/*\n this is a comment\n*/"}}
+{"Key":"%"}
+{"Get":{"state":"/*\n this is a comment\nˇ*/","mode":"Normal"}}
+{"Key":"%"}
+{"Get":{"state":"ˇ/*\n this is a comment\n*/","mode":"Normal"}}
+{"Key":"%"}
+{"Get":{"state":"/*\n this is a comment\nˇ*/","mode":"Normal"}}
+{"Put":{"state":"ˇ// comment"}}
+{"Key":"%"}
+{"Get":{"state":"ˇ// comment","mode":"Normal"}}
@@ -0,0 +1,18 @@
+{"Put":{"state":"#ˇif\n\n#else\n\n#endif\n"}}
+{"Key":"%"}
+{"Get":{"state":"#if\n\nˇ#else\n\n#endif\n","mode":"Normal"}}
+{"Key":"%"}
+{"Get":{"state":"#if\n\n#else\n\nˇ#endif\n","mode":"Normal"}}
+{"Key":"%"}
+{"Get":{"state":"ˇ#if\n\n#else\n\n#endif\n","mode":"Normal"}}
+{"Put":{"state":"#ˇif\n #if\n\n #else\n\n #endif\n\n#else\n#endif\n"}}
+{"Key":"%"}
+{"Get":{"state":"#if\n #if\n\n #else\n\n #endif\n\nˇ#else\n#endif\n","mode":"Normal"}}
+{"Key":"%"}
+{"Key":"%"}
+{"Get":{"state":"ˇ#if\n #if\n\n #else\n\n #endif\n\n#else\n#endif\n","mode":"Normal"}}
+{"Key":"j"}
+{"Key":"%"}
+{"Key":"%"}
+{"Key":"%"}
+{"Get":{"state":"#if\n ˇ#if\n\n #else\n\n #endif\n\n#else\n#endif\n","mode":"Normal"}}
@@ -1,13 +1,13 @@
use std::sync::Arc;
use anyhow::{Context as _, Result};
-use client::{Client, UserStore};
+use client::{Client, NeedsLlmTokenRefresh, UserStore, global_llm_token};
use cloud_api_types::OrganizationId;
use cloud_llm_client::{WebSearchBody, WebSearchResponse};
use futures::AsyncReadExt as _;
use gpui::{App, AppContext, Context, Entity, Task};
use http_client::{HttpClient, Method};
-use language_model::{LlmApiToken, NeedsLlmTokenRefresh};
+use language_model::LlmApiToken;
use web_search::{WebSearchProvider, WebSearchProviderId};
pub struct CloudWebSearchProvider {
@@ -30,7 +30,7 @@ pub struct State {
impl State {
pub fn new(client: Arc<Client>, user_store: Entity<UserStore>, cx: &mut Context<Self>) -> Self {
- let llm_api_token = LlmApiToken::global(cx);
+ let llm_api_token = global_llm_token(cx);
Self {
client,
@@ -73,8 +73,8 @@ async fn perform_web_search(
let http_client = &client.http_client();
let mut retries_remaining = MAX_RETRIES;
- let mut token = llm_api_token
- .acquire(&client, organization_id.clone())
+ let mut token = client
+ .acquire_llm_token(&llm_api_token, organization_id.clone())
.await?;
loop {
@@ -100,8 +100,8 @@ async fn perform_web_search(
response.body_mut().read_to_string(&mut body).await?;
return Ok(serde_json::from_str(&body)?);
} else if response.needs_llm_token_refresh() {
- token = llm_api_token
- .refresh(&client, organization_id.clone())
+ token = client
+ .refresh_llm_token(&llm_api_token, organization_id.clone())
.await?;
retries_remaining -= 1;
} else {
@@ -1,5 +1,6 @@
+use crate::focus_follows_mouse::FocusFollowsMouse as _;
use crate::persistence::model::DockData;
-use crate::{DraggedDock, Event, ModalLayer, Pane};
+use crate::{DraggedDock, Event, FocusFollowsMouse, ModalLayer, Pane, WorkspaceSettings};
use crate::{Workspace, status_bar::StatusItemView};
use anyhow::Context as _;
use client::proto;
@@ -12,7 +13,7 @@ use gpui::{
px,
};
use serde::{Deserialize, Serialize};
-use settings::SettingsStore;
+use settings::{Settings, SettingsStore};
use std::sync::Arc;
use ui::{
ContextMenu, CountBadge, Divider, DividerColor, IconButton, Tooltip, prelude::*,
@@ -252,6 +253,7 @@ pub struct Dock {
is_open: bool,
active_panel_index: Option<usize>,
focus_handle: FocusHandle,
+ focus_follows_mouse: FocusFollowsMouse,
pub(crate) serialized_dock: Option<DockData>,
zoom_layer_open: bool,
modal_layer: Entity<ModalLayer>,
@@ -376,6 +378,7 @@ impl Dock {
active_panel_index: None,
is_open: false,
focus_handle: focus_handle.clone(),
+ focus_follows_mouse: WorkspaceSettings::get_global(cx).focus_follows_mouse,
_subscriptions: [focus_subscription, zoom_subscription],
serialized_dock: None,
zoom_layer_open: false,
@@ -1086,8 +1089,10 @@ impl Render for Dock {
};
div()
+ .id("dock-panel")
.key_context(dispatch_context)
.track_focus(&self.focus_handle(cx))
+ .focus_follows_mouse(self.focus_follows_mouse, cx)
.flex()
.bg(cx.theme().colors().panel_background)
.border_color(cx.theme().colors().border)
@@ -1121,6 +1126,7 @@ impl Render for Dock {
})
} else {
div()
+ .id("dock-panel")
.key_context(dispatch_context)
.track_focus(&self.focus_handle(cx))
}
@@ -0,0 +1,71 @@
+use gpui::{
+ AnyWindowHandle, AppContext as _, Context, FocusHandle, Focusable, Global,
+ StatefulInteractiveElement, Task,
+};
+
+use crate::workspace_settings;
+
+#[derive(Default)]
+struct FfmState {
+ // The window and element to be focused
+ handles: Option<(AnyWindowHandle, FocusHandle)>,
+ // The debounced task which will do the focusing
+ _debounce_task: Option<Task<()>>,
+}
+
+impl Global for FfmState {}
+
+pub trait FocusFollowsMouse<E: Focusable>: StatefulInteractiveElement {
+ fn focus_follows_mouse(
+ self,
+ settings: workspace_settings::FocusFollowsMouse,
+ cx: &Context<E>,
+ ) -> Self {
+ if settings.enabled {
+ self.on_hover(cx.listener(move |this, enter, window, cx| {
+ if *enter {
+ let window_handle = window.window_handle();
+ let focus_handle = this.focus_handle(cx);
+
+ let state = cx.try_global::<FfmState>();
+
+ // Only replace the target if the new handle doesn't contain the existing one.
+ // This ensures that hovering over a parent (e.g., Dock) doesn't override
+ // a more specific child target (e.g., a Pane inside the Dock).
+ let should_replace = state
+ .and_then(|s| s.handles.as_ref())
+ .map(|(_, existing)| !focus_handle.contains(existing, window))
+ .unwrap_or(true);
+
+ if !should_replace {
+ return;
+ }
+
+ let debounce_task = cx.spawn(async move |_this, cx| {
+ cx.background_executor().timer(settings.debounce).await;
+
+ cx.update(|cx| {
+ let state = cx.default_global::<FfmState>();
+ let Some((window, focus)) = state.handles.take() else {
+ return;
+ };
+
+ let _ = cx.update_window(window, move |_view, window, cx| {
+ window.focus(&focus, cx);
+ });
+ });
+ });
+
+ cx.set_global(FfmState {
+ handles: Some((window_handle, focus_handle)),
+ _debounce_task: Some(debounce_task),
+ });
+ }
+ }))
+ } else {
+ self
+ }
+ }
+}
+
+impl<E: Focusable, T: StatefulInteractiveElement> FocusFollowsMouse<E> for T {}
@@ -9,7 +9,7 @@ use crate::{
};
use anyhow::Result;
use client::{Client, proto};
-use futures::{StreamExt, channel::mpsc};
+use futures::channel::mpsc;
use gpui::{
Action, AnyElement, AnyEntity, AnyView, App, AppContext, Context, Entity, EntityId,
EventEmitter, FocusHandle, Focusable, Font, Pixels, Point, Render, SharedString, Task,
@@ -777,8 +777,8 @@ impl<T: Item> ItemHandle for Entity<T> {
send_follower_updates = Some(cx.spawn_in(window, {
let pending_update = pending_update.clone();
async move |workspace, cx| {
- while let Some(mut leader_id) = pending_update_rx.next().await {
- while let Ok(Some(id)) = pending_update_rx.try_next() {
+ while let Ok(mut leader_id) = pending_update_rx.recv().await {
+ while let Ok(id) = pending_update_rx.try_recv() {
leader_id = id;
}
@@ -1,20 +1,21 @@
use anyhow::Result;
use feature_flags::{AgentV2FeatureFlag, FeatureFlagAppExt};
+use gpui::PathPromptOptions;
use gpui::{
AnyView, App, Context, DragMoveEvent, Entity, EntityId, EventEmitter, FocusHandle, Focusable,
ManagedView, MouseButton, Pixels, Render, Subscription, Task, Tiling, Window, WindowId,
actions, deferred, px,
};
-use project::DisableAiSettings;
-#[cfg(any(test, feature = "test-support"))]
-use project::Project;
+use project::{DirectoryLister, DisableAiSettings, Project, ProjectGroupKey};
use settings::Settings;
pub use settings::SidebarSide;
use std::future::Future;
+use std::path::Path;
use std::path::PathBuf;
use std::sync::Arc;
use ui::prelude::*;
use util::ResultExt;
+use util::path_list::PathList;
use zed_actions::agents_sidebar::{MoveWorkspaceToNewWindow, ToggleThreadSwitcher};
use agent_settings::AgentSettings;
@@ -23,9 +24,11 @@ use ui::{ContextMenu, right_click_menu};
const SIDEBAR_RESIZE_HANDLE_SIZE: Pixels = px(6.0);
+use crate::AppState;
use crate::{
CloseIntent, CloseWindow, DockPosition, Event as WorkspaceEvent, Item, ModalView, OpenMode,
Panel, Workspace, WorkspaceId, client_side_decorations,
+ persistence::model::MultiWorkspaceState,
};
actions!(
@@ -37,10 +40,7 @@ actions!(
CloseWorkspaceSidebar,
/// Moves focus to or from the workspace sidebar without closing it.
FocusWorkspaceSidebar,
- /// Switches to the next workspace.
- NextWorkspace,
- /// Switches to the previous workspace.
- PreviousWorkspace,
+ //TODO: Restore next/previous workspace
]
);
@@ -218,10 +218,58 @@ impl<T: Sidebar> SidebarHandle for Entity<T> {
}
}
+/// Tracks which workspace the user is currently looking at.
+///
+/// `Persistent` workspaces live in the `workspaces` vec and are shown in the
+/// sidebar. `Transient` workspaces exist outside the vec and are discarded
+/// when the user switches away.
+enum ActiveWorkspace {
+ /// A persistent workspace, identified by index into the `workspaces` vec.
+ Persistent(usize),
+ /// A workspace not in the `workspaces` vec that will be discarded on
+ /// switch or promoted to persistent when the sidebar is opened.
+ Transient(Entity<Workspace>),
+}
+
+impl ActiveWorkspace {
+ fn persistent_index(&self) -> Option<usize> {
+ match self {
+ Self::Persistent(index) => Some(*index),
+ Self::Transient(_) => None,
+ }
+ }
+
+ fn transient_workspace(&self) -> Option<&Entity<Workspace>> {
+ match self {
+ Self::Transient(workspace) => Some(workspace),
+ Self::Persistent(_) => None,
+ }
+ }
+
+ /// Sets the active workspace to transient, returning the previous
+ /// transient workspace (if any).
+ fn set_transient(&mut self, workspace: Entity<Workspace>) -> Option<Entity<Workspace>> {
+ match std::mem::replace(self, Self::Transient(workspace)) {
+ Self::Transient(old) => Some(old),
+ Self::Persistent(_) => None,
+ }
+ }
+
+ /// Sets the active workspace to persistent at the given index,
+ /// returning the previous transient workspace (if any).
+ fn set_persistent(&mut self, index: usize) -> Option<Entity<Workspace>> {
+ match std::mem::replace(self, Self::Persistent(index)) {
+ Self::Transient(workspace) => Some(workspace),
+ Self::Persistent(_) => None,
+ }
+ }
+}
+
pub struct MultiWorkspace {
window_id: WindowId,
workspaces: Vec<Entity<Workspace>>,
- active_workspace_index: usize,
+ active_workspace: ActiveWorkspace,
+ project_group_keys: Vec<ProjectGroupKey>,
sidebar: Option<Box<dyn SidebarHandle>>,
sidebar_open: bool,
sidebar_overlay: Option<AnyView>,
@@ -256,12 +304,15 @@ impl MultiWorkspace {
}
});
let quit_subscription = cx.on_app_quit(Self::app_will_quit);
- let settings_subscription =
- cx.observe_global_in::<settings::SettingsStore>(window, |this, window, cx| {
- if DisableAiSettings::get_global(cx).disable_ai && this.sidebar_open {
- this.close_sidebar(window, cx);
+ let settings_subscription = cx.observe_global_in::<settings::SettingsStore>(window, {
+ let mut previous_disable_ai = DisableAiSettings::get_global(cx).disable_ai;
+ move |this, window, cx| {
+ if DisableAiSettings::get_global(cx).disable_ai != previous_disable_ai {
+ this.collapse_to_single_workspace(window, cx);
+ previous_disable_ai = DisableAiSettings::get_global(cx).disable_ai;
}
- });
+ }
+ });
Self::subscribe_to_workspace(&workspace, window, cx);
let weak_self = cx.weak_entity();
workspace.update(cx, |workspace, cx| {
@@ -269,8 +320,9 @@ impl MultiWorkspace {
});
Self {
window_id: window.window_handle().window_id(),
- workspaces: vec![workspace],
- active_workspace_index: 0,
+ project_group_keys: Vec::new(),
+ workspaces: Vec::new(),
+ active_workspace: ActiveWorkspace::Transient(workspace),
sidebar: None,
sidebar_open: false,
sidebar_overlay: None,
@@ -332,7 +384,7 @@ impl MultiWorkspace {
return;
}
- if self.sidebar_open {
+ if self.sidebar_open() {
self.close_sidebar(window, cx);
} else {
self.open_sidebar(cx);
@@ -348,7 +400,7 @@ impl MultiWorkspace {
return;
}
- if self.sidebar_open {
+ if self.sidebar_open() {
self.close_sidebar(window, cx);
}
}
@@ -358,7 +410,7 @@ impl MultiWorkspace {
return;
}
- if self.sidebar_open {
+ if self.sidebar_open() {
let sidebar_is_focused = self
.sidebar
.as_ref()
@@ -383,8 +435,13 @@ impl MultiWorkspace {
pub fn open_sidebar(&mut self, cx: &mut Context<Self>) {
self.sidebar_open = true;
+ if let ActiveWorkspace::Transient(workspace) = &self.active_workspace {
+ let workspace = workspace.clone();
+ let index = self.promote_transient(workspace, cx);
+ self.active_workspace = ActiveWorkspace::Persistent(index);
+ }
let sidebar_focus_handle = self.sidebar.as_ref().map(|s| s.focus_handle(cx));
- for workspace in &self.workspaces {
+ for workspace in self.workspaces.iter() {
workspace.update(cx, |workspace, _cx| {
workspace.set_sidebar_focus_handle(sidebar_focus_handle.clone());
});
@@ -395,7 +452,7 @@ impl MultiWorkspace {
pub fn close_sidebar(&mut self, window: &mut Window, cx: &mut Context<Self>) {
self.sidebar_open = false;
- for workspace in &self.workspaces {
+ for workspace in self.workspaces.iter() {
workspace.update(cx, |workspace, _cx| {
workspace.set_sidebar_focus_handle(None);
});
@@ -410,7 +467,7 @@ impl MultiWorkspace {
pub fn close_window(&mut self, _: &CloseWindow, window: &mut Window, cx: &mut Context<Self>) {
cx.spawn_in(window, async move |this, cx| {
let workspaces = this.update(cx, |multi_workspace, _cx| {
- multi_workspace.workspaces().to_vec()
+ multi_workspace.workspaces().cloned().collect::<Vec<_>>()
})?;
for workspace in workspaces {
@@ -438,6 +495,20 @@ impl MultiWorkspace {
window: &Window,
cx: &mut Context<Self>,
) {
+ let project = workspace.read(cx).project().clone();
+ cx.subscribe_in(&project, window, {
+ let workspace = workspace.downgrade();
+ move |this, _project, event, _window, cx| match event {
+ project::Event::WorktreeAdded(_) | project::Event::WorktreeRemoved(_) => {
+ if let Some(workspace) = workspace.upgrade() {
+ this.add_project_group_key(workspace.read(cx).project_group_key(cx));
+ }
+ }
+ _ => {}
+ }
+ })
+ .detach();
+
cx.subscribe_in(workspace, window, |this, workspace, event, window, cx| {
if let WorkspaceEvent::Activate = event {
this.activate(workspace.clone(), window, cx);
@@ -446,98 +517,322 @@ impl MultiWorkspace {
.detach();
}
- pub fn workspace(&self) -> &Entity<Workspace> {
- &self.workspaces[self.active_workspace_index]
+ pub fn add_project_group_key(&mut self, project_group_key: ProjectGroupKey) {
+ if project_group_key.path_list().paths().is_empty() {
+ return;
+ }
+ if self.project_group_keys.contains(&project_group_key) {
+ return;
+ }
+ self.project_group_keys.push(project_group_key);
}
- pub fn workspaces(&self) -> &[Entity<Workspace>] {
- &self.workspaces
+ pub fn restore_project_group_keys(&mut self, keys: Vec<ProjectGroupKey>) {
+ let mut restored = keys;
+ for existing_key in &self.project_group_keys {
+ if !restored.contains(existing_key) {
+ restored.push(existing_key.clone());
+ }
+ }
+ self.project_group_keys = restored;
}
- pub fn active_workspace_index(&self) -> usize {
- self.active_workspace_index
+ pub fn project_group_keys(&self) -> impl Iterator<Item = &ProjectGroupKey> {
+ self.project_group_keys.iter()
}
- /// Adds a workspace to this window without changing which workspace is
- /// active.
- pub fn add(&mut self, workspace: Entity<Workspace>, window: &Window, cx: &mut Context<Self>) {
- if !self.multi_workspace_enabled(cx) {
- self.set_single_workspace(workspace, cx);
- return;
+ /// Returns the project groups, ordered by most recently added.
+ pub fn project_groups(
+ &self,
+ cx: &App,
+ ) -> impl Iterator<Item = (ProjectGroupKey, Vec<Entity<Workspace>>)> {
+ let mut groups = self
+ .project_group_keys
+ .iter()
+ .rev()
+ .map(|key| (key.clone(), Vec::new()))
+ .collect::<Vec<_>>();
+ for workspace in &self.workspaces {
+ let key = workspace.read(cx).project_group_key(cx);
+ if let Some((_, workspaces)) = groups.iter_mut().find(|(k, _)| k == &key) {
+ workspaces.push(workspace.clone());
+ }
}
+ groups.into_iter()
+ }
- self.insert_workspace(workspace, window, cx);
+ pub fn workspaces_for_project_group(
+ &self,
+ project_group_key: &ProjectGroupKey,
+ cx: &App,
+ ) -> impl Iterator<Item = &Entity<Workspace>> {
+ self.workspaces
+ .iter()
+ .filter(move |ws| ws.read(cx).project_group_key(cx) == *project_group_key)
}
- /// Ensures the workspace is in the multiworkspace and makes it the active one.
- pub fn activate(
+ pub fn remove_folder_from_project_group(
&mut self,
- workspace: Entity<Workspace>,
- window: &mut Window,
+ project_group_key: &ProjectGroupKey,
+ path: &Path,
cx: &mut Context<Self>,
) {
- if !self.multi_workspace_enabled(cx) {
- self.set_single_workspace(workspace, cx);
+ let new_path_list = project_group_key.path_list().without_path(path);
+ if new_path_list.is_empty() {
return;
}
- let index = self.insert_workspace(workspace, &*window, cx);
- let changed = self.active_workspace_index != index;
- self.active_workspace_index = index;
- if changed {
- cx.emit(MultiWorkspaceEvent::ActiveWorkspaceChanged);
- self.serialize(cx);
+ let new_key = ProjectGroupKey::new(project_group_key.host(), new_path_list);
+
+ let workspaces: Vec<_> = self
+ .workspaces_for_project_group(project_group_key, cx)
+ .cloned()
+ .collect();
+
+ self.add_project_group_key(new_key);
+
+ for workspace in workspaces {
+ let project = workspace.read(cx).project().clone();
+ project.update(cx, |project, cx| {
+ project.remove_worktree_for_main_worktree_path(path, cx);
+ });
}
- self.focus_active_workspace(window, cx);
+
+ self.serialize(cx);
cx.notify();
}
- /// Replaces the currently active workspace with a new one. If the
- /// workspace is already in the list, this just switches to it.
- pub fn replace(
+ pub fn prompt_to_add_folders_to_project_group(
&mut self,
- workspace: Entity<Workspace>,
- window: &Window,
+ key: &ProjectGroupKey,
+ window: &mut Window,
cx: &mut Context<Self>,
) {
- if !self.multi_workspace_enabled(cx) {
- self.set_single_workspace(workspace, cx);
- return;
+ let paths = self.workspace().update(cx, |workspace, cx| {
+ workspace.prompt_for_open_path(
+ PathPromptOptions {
+ files: false,
+ directories: true,
+ multiple: true,
+ prompt: None,
+ },
+ DirectoryLister::Project(workspace.project().clone()),
+ window,
+ cx,
+ )
+ });
+
+ let key = key.clone();
+ cx.spawn_in(window, async move |this, cx| {
+ if let Some(new_paths) = paths.await.ok().flatten() {
+ if !new_paths.is_empty() {
+ this.update(cx, |multi_workspace, cx| {
+ multi_workspace.add_folders_to_project_group(&key, new_paths, cx);
+ })?;
+ }
+ }
+ anyhow::Ok(())
+ })
+ .detach_and_log_err(cx);
+ }
+
+ pub fn add_folders_to_project_group(
+ &mut self,
+ project_group_key: &ProjectGroupKey,
+ new_paths: Vec<PathBuf>,
+ cx: &mut Context<Self>,
+ ) {
+ let mut all_paths: Vec<PathBuf> = project_group_key.path_list().paths().to_vec();
+ all_paths.extend(new_paths.iter().cloned());
+ let new_path_list = PathList::new(&all_paths);
+ let new_key = ProjectGroupKey::new(project_group_key.host(), new_path_list);
+
+ let workspaces: Vec<_> = self
+ .workspaces_for_project_group(project_group_key, cx)
+ .cloned()
+ .collect();
+
+ self.add_project_group_key(new_key);
+
+ for workspace in workspaces {
+ let project = workspace.read(cx).project().clone();
+ for path in &new_paths {
+ project
+ .update(cx, |project, cx| {
+ project.find_or_create_worktree(path, true, cx)
+ })
+ .detach_and_log_err(cx);
+ }
}
- if let Some(index) = self.workspaces.iter().position(|w| *w == workspace) {
- let changed = self.active_workspace_index != index;
- self.active_workspace_index = index;
- if changed {
- cx.emit(MultiWorkspaceEvent::ActiveWorkspaceChanged);
- self.serialize(cx);
+ self.serialize(cx);
+ cx.notify();
+ }
+
+ pub fn remove_project_group(
+ &mut self,
+ key: &ProjectGroupKey,
+ window: &mut Window,
+ cx: &mut Context<Self>,
+ ) {
+ self.project_group_keys.retain(|k| k != key);
+
+ let workspaces: Vec<_> = self
+ .workspaces_for_project_group(key, cx)
+ .cloned()
+ .collect();
+ for workspace in workspaces {
+ self.remove(&workspace, window, cx);
+ }
+
+ self.serialize(cx);
+ cx.notify();
+ }
+
+ /// Finds an existing workspace in this multi-workspace whose paths match,
+ /// or creates a new one (deserializing its saved state from the database).
+ /// Never searches other windows or matches workspaces with a superset of
+ /// the requested paths.
+ pub fn find_or_create_local_workspace(
+ &mut self,
+ path_list: PathList,
+ window: &mut Window,
+ cx: &mut Context<Self>,
+ ) -> Task<Result<Entity<Workspace>>> {
+ if let Some(workspace) = self
+ .workspaces
+ .iter()
+ .find(|ws| PathList::new(&ws.read(cx).root_paths(cx)) == path_list)
+ .cloned()
+ {
+ self.activate(workspace.clone(), window, cx);
+ return Task::ready(Ok(workspace));
+ }
+
+ if let Some(transient) = self.active_workspace.transient_workspace() {
+ if transient.read(cx).project_group_key(cx).path_list() == &path_list {
+ return Task::ready(Ok(transient.clone()));
}
- cx.notify();
- return;
}
- let old_workspace = std::mem::replace(
- &mut self.workspaces[self.active_workspace_index],
- workspace.clone(),
- );
+ let paths = path_list.paths().to_vec();
+ let app_state = self.workspace().read(cx).app_state().clone();
+ let requesting_window = window.window_handle().downcast::<MultiWorkspace>();
+
+ cx.spawn(async move |_this, cx| {
+ let result = cx
+ .update(|cx| {
+ Workspace::new_local(
+ paths,
+ app_state,
+ requesting_window,
+ None,
+ None,
+ OpenMode::Activate,
+ cx,
+ )
+ })
+ .await?;
+ Ok(result.workspace)
+ })
+ }
- let old_entity_id = old_workspace.entity_id();
- self.detach_workspace(&old_workspace, cx);
+ pub fn workspace(&self) -> &Entity<Workspace> {
+ match &self.active_workspace {
+ ActiveWorkspace::Persistent(index) => &self.workspaces[*index],
+ ActiveWorkspace::Transient(workspace) => workspace,
+ }
+ }
- Self::subscribe_to_workspace(&workspace, window, cx);
- self.sync_sidebar_to_workspace(&workspace, cx);
+ pub fn workspaces(&self) -> impl Iterator<Item = &Entity<Workspace>> {
+ self.workspaces
+ .iter()
+ .chain(self.active_workspace.transient_workspace())
+ }
+
+ /// Adds a workspace to this window as persistent without changing which
+ /// workspace is active. Unlike `activate()`, this always inserts into the
+ /// persistent list regardless of sidebar state — it's used for system-
+ /// initiated additions like deserialization and worktree discovery.
+ pub fn add(&mut self, workspace: Entity<Workspace>, window: &Window, cx: &mut Context<Self>) {
+ self.insert_workspace(workspace, window, cx);
+ }
+
+ /// Ensures the workspace is in the multiworkspace and makes it the active one.
+ pub fn activate(
+ &mut self,
+ workspace: Entity<Workspace>,
+ window: &mut Window,
+ cx: &mut Context<Self>,
+ ) {
+ // Re-activating the current workspace is a no-op.
+ if self.workspace() == &workspace {
+ self.focus_active_workspace(window, cx);
+ return;
+ }
+
+ // Resolve where we're going.
+ let new_index = if let Some(index) = self.workspaces.iter().position(|w| *w == workspace) {
+ Some(index)
+ } else if self.sidebar_open {
+ Some(self.insert_workspace(workspace.clone(), &*window, cx))
+ } else {
+ None
+ };
+
+ // Transition the active workspace.
+ if let Some(index) = new_index {
+ if let Some(old) = self.active_workspace.set_persistent(index) {
+ if self.sidebar_open {
+ self.promote_transient(old, cx);
+ } else {
+ self.detach_workspace(&old, cx);
+ cx.emit(MultiWorkspaceEvent::WorkspaceRemoved(old.entity_id()));
+ }
+ }
+ } else {
+ Self::subscribe_to_workspace(&workspace, window, cx);
+ let weak_self = cx.weak_entity();
+ workspace.update(cx, |workspace, cx| {
+ workspace.set_multi_workspace(weak_self, cx);
+ });
+ if let Some(old) = self.active_workspace.set_transient(workspace) {
+ self.detach_workspace(&old, cx);
+ cx.emit(MultiWorkspaceEvent::WorkspaceRemoved(old.entity_id()));
+ }
+ }
- cx.emit(MultiWorkspaceEvent::WorkspaceRemoved(old_entity_id));
- cx.emit(MultiWorkspaceEvent::WorkspaceAdded(workspace));
cx.emit(MultiWorkspaceEvent::ActiveWorkspaceChanged);
self.serialize(cx);
+ self.focus_active_workspace(window, cx);
cx.notify();
}
- fn set_single_workspace(&mut self, workspace: Entity<Workspace>, cx: &mut Context<Self>) {
- self.workspaces[0] = workspace;
- self.active_workspace_index = 0;
- cx.emit(MultiWorkspaceEvent::ActiveWorkspaceChanged);
+ /// Promotes a former transient workspace into the persistent list.
+ /// Returns the index of the newly inserted workspace.
+ fn promote_transient(&mut self, workspace: Entity<Workspace>, cx: &mut Context<Self>) -> usize {
+ let project_group_key = workspace.read(cx).project().read(cx).project_group_key(cx);
+ self.add_project_group_key(project_group_key);
+ self.workspaces.push(workspace.clone());
+ cx.emit(MultiWorkspaceEvent::WorkspaceAdded(workspace));
+ self.workspaces.len() - 1
+ }
+
+ /// Collapses to a single transient workspace, discarding all persistent
+ /// workspaces. Used when multi-workspace is disabled (e.g. disable_ai).
+ fn collapse_to_single_workspace(&mut self, window: &mut Window, cx: &mut Context<Self>) {
+ if self.sidebar_open {
+ self.close_sidebar(window, cx);
+ }
+ let active = self.workspace().clone();
+ for workspace in std::mem::take(&mut self.workspaces) {
+ if workspace != active {
+ self.detach_workspace(&workspace, cx);
+ cx.emit(MultiWorkspaceEvent::WorkspaceRemoved(workspace.entity_id()));
+ }
+ }
+ self.project_group_keys.clear();
+ self.active_workspace = ActiveWorkspace::Transient(active);
cx.notify();
}
@@ -553,12 +848,16 @@ impl MultiWorkspace {
if let Some(index) = self.workspaces.iter().position(|w| *w == workspace) {
index
} else {
+ let project_group_key = workspace.read(cx).project().read(cx).project_group_key(cx);
+
Self::subscribe_to_workspace(&workspace, window, cx);
self.sync_sidebar_to_workspace(&workspace, cx);
let weak_self = cx.weak_entity();
workspace.update(cx, |workspace, cx| {
workspace.set_multi_workspace(weak_self, cx);
});
+
+ self.add_project_group_key(project_group_key);
self.workspaces.push(workspace.clone());
cx.emit(MultiWorkspaceEvent::WorkspaceAdded(workspace));
cx.notify();
@@ -589,7 +888,7 @@ impl MultiWorkspace {
}
fn sync_sidebar_to_workspace(&self, workspace: &Entity<Workspace>, cx: &mut Context<Self>) {
- if self.sidebar_open {
+ if self.sidebar_open() {
let sidebar_focus_handle = self.sidebar.as_ref().map(|s| s.focus_handle(cx));
workspace.update(cx, |workspace, _| {
workspace.set_sidebar_focus_handle(sidebar_focus_handle);
@@ -597,36 +896,17 @@ impl MultiWorkspace {
}
}
- fn cycle_workspace(&mut self, delta: isize, window: &mut Window, cx: &mut Context<Self>) {
- let count = self.workspaces.len() as isize;
- if count <= 1 {
- return;
- }
- let current = self.active_workspace_index as isize;
- let next = ((current + delta).rem_euclid(count)) as usize;
- let workspace = self.workspaces[next].clone();
- self.activate(workspace, window, cx);
- }
-
- fn next_workspace(&mut self, _: &NextWorkspace, window: &mut Window, cx: &mut Context<Self>) {
- self.cycle_workspace(1, window, cx);
- }
-
- fn previous_workspace(
- &mut self,
- _: &PreviousWorkspace,
- window: &mut Window,
- cx: &mut Context<Self>,
- ) {
- self.cycle_workspace(-1, window, cx);
- }
-
pub(crate) fn serialize(&mut self, cx: &mut Context<Self>) {
self._serialize_task = Some(cx.spawn(async move |this, cx| {
let Some((window_id, state)) = this
.read_with(cx, |this, cx| {
- let state = crate::persistence::model::MultiWorkspaceState {
+ let state = MultiWorkspaceState {
active_workspace_id: this.workspace().read(cx).database_id(),
+ project_group_keys: this
+ .project_group_keys()
+ .cloned()
+ .map(Into::into)
+ .collect::<Vec<_>>(),
sidebar_open: this.sidebar_open,
sidebar_state: this.sidebar.as_ref().and_then(|s| s.serialized_state(cx)),
};
@@ -841,26 +1121,82 @@ impl MultiWorkspace {
let Some(index) = self.workspaces.iter().position(|w| w == workspace) else {
return false;
};
+
+ let old_key = workspace.read(cx).project_group_key(cx);
+
if self.workspaces.len() <= 1 {
- return false;
- }
+ let has_worktrees = workspace.read(cx).visible_worktrees(cx).next().is_some();
+
+ if !has_worktrees {
+ return false;
+ }
- let removed_workspace = self.workspaces.remove(index);
+ let old_workspace = workspace.clone();
+ let old_entity_id = old_workspace.entity_id();
- if self.active_workspace_index >= self.workspaces.len() {
- self.active_workspace_index = self.workspaces.len() - 1;
- } else if self.active_workspace_index > index {
- self.active_workspace_index -= 1;
+ let app_state = old_workspace.read(cx).app_state().clone();
+
+ let project = Project::local(
+ app_state.client.clone(),
+ app_state.node_runtime.clone(),
+ app_state.user_store.clone(),
+ app_state.languages.clone(),
+ app_state.fs.clone(),
+ None,
+ project::LocalProjectFlags::default(),
+ cx,
+ );
+
+ let new_workspace = cx.new(|cx| Workspace::new(None, project, app_state, window, cx));
+
+ self.workspaces[0] = new_workspace.clone();
+ self.active_workspace = ActiveWorkspace::Persistent(0);
+
+ Self::subscribe_to_workspace(&new_workspace, window, cx);
+
+ self.sync_sidebar_to_workspace(&new_workspace, cx);
+
+ let weak_self = cx.weak_entity();
+
+ new_workspace.update(cx, |workspace, cx| {
+ workspace.set_multi_workspace(weak_self, cx);
+ });
+
+ self.detach_workspace(&old_workspace, cx);
+
+ cx.emit(MultiWorkspaceEvent::WorkspaceRemoved(old_entity_id));
+ cx.emit(MultiWorkspaceEvent::WorkspaceAdded(new_workspace));
+ cx.emit(MultiWorkspaceEvent::ActiveWorkspaceChanged);
+ } else {
+ let removed_workspace = self.workspaces.remove(index);
+
+ if let Some(active_index) = self.active_workspace.persistent_index() {
+ if active_index >= self.workspaces.len() {
+ self.active_workspace = ActiveWorkspace::Persistent(self.workspaces.len() - 1);
+ } else if active_index > index {
+ self.active_workspace = ActiveWorkspace::Persistent(active_index - 1);
+ }
+ }
+
+ self.detach_workspace(&removed_workspace, cx);
+
+ cx.emit(MultiWorkspaceEvent::WorkspaceRemoved(
+ removed_workspace.entity_id(),
+ ));
+ cx.emit(MultiWorkspaceEvent::ActiveWorkspaceChanged);
}
- self.detach_workspace(&removed_workspace, cx);
+ let key_still_in_use = self
+ .workspaces
+ .iter()
+ .any(|ws| ws.read(cx).project_group_key(cx) == old_key);
+
+ if !key_still_in_use {
+ self.project_group_keys.retain(|k| k != &old_key);
+ }
self.serialize(cx);
self.focus_active_workspace(window, cx);
- cx.emit(MultiWorkspaceEvent::WorkspaceRemoved(
- removed_workspace.entity_id(),
- ));
- cx.emit(MultiWorkspaceEvent::ActiveWorkspaceChanged);
cx.notify();
true
@@ -877,7 +1213,7 @@ impl MultiWorkspace {
return;
}
- let app_state: Arc<crate::AppState> = workspace.read(cx).app_state().clone();
+ let app_state: Arc<AppState> = workspace.read(cx).app_state().clone();
cx.defer(move |cx| {
let options = (app_state.build_window_options)(None, cx);
@@ -894,6 +1230,58 @@ impl MultiWorkspace {
});
}
+ pub fn move_project_group_to_new_window(
+ &mut self,
+ key: &ProjectGroupKey,
+ window: &mut Window,
+ cx: &mut Context<Self>,
+ ) {
+ let workspaces: Vec<_> = self
+ .workspaces_for_project_group(key, cx)
+ .cloned()
+ .collect();
+ if workspaces.is_empty() {
+ return;
+ }
+
+ self.project_group_keys.retain(|k| k != key);
+
+ let mut removed = Vec::new();
+ for workspace in &workspaces {
+ if self.remove(workspace, window, cx) {
+ removed.push(workspace.clone());
+ }
+ }
+
+ if removed.is_empty() {
+ return;
+ }
+
+ let app_state = removed[0].read(cx).app_state().clone();
+
+ cx.defer(move |cx| {
+ let options = (app_state.build_window_options)(None, cx);
+
+ let first = removed[0].clone();
+ let rest = removed[1..].to_vec();
+
+ let Ok(new_window) = cx.open_window(options, |window, cx| {
+ cx.new(|cx| MultiWorkspace::new(first, window, cx))
+ }) else {
+ return;
+ };
+
+ new_window
+ .update(cx, |mw, window, cx| {
+ for workspace in rest {
+ mw.activate(workspace, window, cx);
+ }
+ window.activate_window();
+ })
+ .log_err();
+ });
+ }
+
fn move_active_workspace_to_new_window(
&mut self,
_: &MoveWorkspaceToNewWindow,
@@ -911,17 +1299,10 @@ impl MultiWorkspace {
window: &mut Window,
cx: &mut Context<Self>,
) -> Task<Result<Entity<Workspace>>> {
- let workspace = self.workspace().clone();
-
- let needs_close_prompt =
- open_mode == OpenMode::Replace || !self.multi_workspace_enabled(cx);
- let open_mode = if self.multi_workspace_enabled(cx) {
- open_mode
+ if self.multi_workspace_enabled(cx) {
+ self.find_or_create_local_workspace(PathList::new(&paths), window, cx)
} else {
- OpenMode::Replace
- };
-
- if needs_close_prompt {
+ let workspace = self.workspace().clone();
cx.spawn_in(window, async move |_this, cx| {
let should_continue = workspace
.update_in(cx, |workspace, window, cx| {
@@ -938,10 +1319,6 @@ impl MultiWorkspace {
Ok(workspace)
}
})
- } else {
- workspace.update(cx, |workspace, cx| {
- workspace.open_workspace_for_paths(open_mode, paths, window, cx)
- })
}
}
}
@@ -1048,8 +1425,6 @@ impl Render for MultiWorkspace {
this.focus_sidebar(window, cx);
},
))
- .on_action(cx.listener(Self::next_workspace))
- .on_action(cx.listener(Self::previous_workspace))
.on_action(cx.listener(Self::move_active_workspace_to_new_window))
.on_action(cx.listener(
|this: &mut Self, action: &ToggleThreadSwitcher, window, cx| {
@@ -2,7 +2,8 @@ use super::*;
use feature_flags::FeatureFlagAppExt;
use fs::FakeFs;
use gpui::TestAppContext;
-use project::DisableAiSettings;
+use project::{DisableAiSettings, ProjectGroupKey};
+use serde_json::json;
use settings::SettingsStore;
fn init_test(cx: &mut TestAppContext) {
@@ -87,86 +88,256 @@ async fn test_sidebar_disabled_when_disable_ai_is_enabled(cx: &mut TestAppContex
}
#[gpui::test]
-async fn test_replace(cx: &mut TestAppContext) {
+async fn test_project_group_keys_initial(cx: &mut TestAppContext) {
init_test(cx);
let fs = FakeFs::new(cx.executor());
- let project_a = Project::test(fs.clone(), [], cx).await;
- let project_b = Project::test(fs.clone(), [], cx).await;
- let project_c = Project::test(fs.clone(), [], cx).await;
- let project_d = Project::test(fs.clone(), [], cx).await;
+ fs.insert_tree("/root_a", json!({ "file.txt": "" })).await;
+ let project = Project::test(fs, ["/root_a".as_ref()], cx).await;
+
+ let expected_key = project.read_with(cx, |project, cx| project.project_group_key(cx));
let (multi_workspace, cx) =
- cx.add_window_view(|window, cx| MultiWorkspace::test_new(project_a.clone(), window, cx));
+ cx.add_window_view(|window, cx| MultiWorkspace::test_new(project, window, cx));
+
+ multi_workspace.update(cx, |mw, cx| {
+ mw.open_sidebar(cx);
+ });
- let workspace_a_id = multi_workspace.read_with(cx, |mw, _cx| mw.workspaces()[0].entity_id());
+ multi_workspace.read_with(cx, |mw, _cx| {
+ let keys: Vec<&ProjectGroupKey> = mw.project_group_keys().collect();
+ assert_eq!(keys.len(), 1, "should have exactly one key on creation");
+ assert_eq!(*keys[0], expected_key);
+ });
+}
- // Replace the only workspace (single-workspace case).
- let workspace_b = multi_workspace.update_in(cx, |mw, window, cx| {
- let workspace = cx.new(|cx| Workspace::test_new(project_b.clone(), window, cx));
- mw.replace(workspace.clone(), &*window, cx);
- workspace
+#[gpui::test]
+async fn test_project_group_keys_add_workspace(cx: &mut TestAppContext) {
+ init_test(cx);
+ let fs = FakeFs::new(cx.executor());
+ fs.insert_tree("/root_a", json!({ "file.txt": "" })).await;
+ fs.insert_tree("/root_b", json!({ "file.txt": "" })).await;
+ let project_a = Project::test(fs.clone(), ["/root_a".as_ref()], cx).await;
+ let project_b = Project::test(fs.clone(), ["/root_b".as_ref()], cx).await;
+
+ let key_a = project_a.read_with(cx, |p, cx| p.project_group_key(cx));
+ let key_b = project_b.read_with(cx, |p, cx| p.project_group_key(cx));
+ assert_ne!(
+ key_a, key_b,
+ "different roots should produce different keys"
+ );
+
+ let (multi_workspace, cx) =
+ cx.add_window_view(|window, cx| MultiWorkspace::test_new(project_a, window, cx));
+
+ multi_workspace.update(cx, |mw, cx| {
+ mw.open_sidebar(cx);
+ });
+
+ multi_workspace.read_with(cx, |mw, _cx| {
+ assert_eq!(mw.project_group_keys().count(), 1);
+ });
+
+ // Adding a workspace with a different project root adds a new key.
+ multi_workspace.update_in(cx, |mw, window, cx| {
+ mw.test_add_workspace(project_b, window, cx);
});
multi_workspace.read_with(cx, |mw, _cx| {
- assert_eq!(mw.workspaces().len(), 1);
+ let keys: Vec<&ProjectGroupKey> = mw.project_group_keys().collect();
assert_eq!(
- mw.workspaces()[0].entity_id(),
- workspace_b.entity_id(),
- "slot should now be project_b"
- );
- assert_ne!(
- mw.workspaces()[0].entity_id(),
- workspace_a_id,
- "project_a should be gone"
+ keys.len(),
+ 2,
+ "should have two keys after adding a second workspace"
);
+ assert_eq!(*keys[0], key_a);
+ assert_eq!(*keys[1], key_b);
});
+}
+
+#[gpui::test]
+async fn test_project_group_keys_duplicate_not_added(cx: &mut TestAppContext) {
+ init_test(cx);
+ let fs = FakeFs::new(cx.executor());
+ fs.insert_tree("/root_a", json!({ "file.txt": "" })).await;
+ let project_a = Project::test(fs.clone(), ["/root_a".as_ref()], cx).await;
+ // A second project entity pointing at the same path produces the same key.
+ let project_a2 = Project::test(fs.clone(), ["/root_a".as_ref()], cx).await;
+
+ let key_a = project_a.read_with(cx, |p, cx| p.project_group_key(cx));
+ let key_a2 = project_a2.read_with(cx, |p, cx| p.project_group_key(cx));
+ assert_eq!(key_a, key_a2, "same root path should produce the same key");
+
+ let (multi_workspace, cx) =
+ cx.add_window_view(|window, cx| MultiWorkspace::test_new(project_a, window, cx));
- // Add project_c as a second workspace, then replace it with project_d.
- let workspace_c = multi_workspace.update_in(cx, |mw, window, cx| {
- mw.test_add_workspace(project_c.clone(), window, cx)
+ multi_workspace.update(cx, |mw, cx| {
+ mw.open_sidebar(cx);
+ });
+
+ multi_workspace.update_in(cx, |mw, window, cx| {
+ mw.test_add_workspace(project_a2, window, cx);
});
multi_workspace.read_with(cx, |mw, _cx| {
- assert_eq!(mw.workspaces().len(), 2);
- assert_eq!(mw.active_workspace_index(), 1);
+ let keys: Vec<&ProjectGroupKey> = mw.project_group_keys().collect();
+ assert_eq!(
+ keys.len(),
+ 1,
+ "duplicate key should not be added when a workspace with the same root is inserted"
+ );
});
+}
+
+#[gpui::test]
+async fn test_project_group_keys_on_worktree_added(cx: &mut TestAppContext) {
+ init_test(cx);
+ let fs = FakeFs::new(cx.executor());
+ fs.insert_tree("/root_a", json!({ "file.txt": "" })).await;
+ fs.insert_tree("/root_b", json!({ "file.txt": "" })).await;
+ let project = Project::test(fs, ["/root_a".as_ref()], cx).await;
+
+ let initial_key = project.read_with(cx, |p, cx| p.project_group_key(cx));
+
+ let (multi_workspace, cx) =
+ cx.add_window_view(|window, cx| MultiWorkspace::test_new(project.clone(), window, cx));
- let workspace_d = multi_workspace.update_in(cx, |mw, window, cx| {
- let workspace = cx.new(|cx| Workspace::test_new(project_d.clone(), window, cx));
- mw.replace(workspace.clone(), &*window, cx);
- workspace
+ multi_workspace.update(cx, |mw, cx| {
+ mw.open_sidebar(cx);
});
+ // Add a second worktree to the same project.
+ let (worktree, _) = project
+ .update(cx, |project, cx| {
+ project.find_or_create_worktree("/root_b", true, cx)
+ })
+ .await
+ .unwrap();
+ worktree
+ .read_with(cx, |tree, _| tree.as_local().unwrap().scan_complete())
+ .await;
+ cx.run_until_parked();
+
+ let updated_key = project.read_with(cx, |p, cx| p.project_group_key(cx));
+ assert_ne!(
+ initial_key, updated_key,
+ "key should change after adding a worktree"
+ );
+
multi_workspace.read_with(cx, |mw, _cx| {
- assert_eq!(mw.workspaces().len(), 2, "should still have 2 workspaces");
- assert_eq!(mw.active_workspace_index(), 1);
+ let keys: Vec<&ProjectGroupKey> = mw.project_group_keys().collect();
assert_eq!(
- mw.workspaces()[1].entity_id(),
- workspace_d.entity_id(),
- "active slot should now be project_d"
- );
- assert_ne!(
- mw.workspaces()[1].entity_id(),
- workspace_c.entity_id(),
- "project_c should be gone"
+ keys.len(),
+ 2,
+ "should have both the original and updated key"
);
+ assert_eq!(*keys[0], initial_key);
+ assert_eq!(*keys[1], updated_key);
});
+}
- // Replace with workspace_b which is already in the list — should just switch.
- multi_workspace.update_in(cx, |mw, window, cx| {
- mw.replace(workspace_b.clone(), &*window, cx);
+#[gpui::test]
+async fn test_project_group_keys_on_worktree_removed(cx: &mut TestAppContext) {
+ init_test(cx);
+ let fs = FakeFs::new(cx.executor());
+ fs.insert_tree("/root_a", json!({ "file.txt": "" })).await;
+ fs.insert_tree("/root_b", json!({ "file.txt": "" })).await;
+ let project = Project::test(fs, ["/root_a".as_ref(), "/root_b".as_ref()], cx).await;
+
+ let initial_key = project.read_with(cx, |p, cx| p.project_group_key(cx));
+
+ let (multi_workspace, cx) =
+ cx.add_window_view(|window, cx| MultiWorkspace::test_new(project.clone(), window, cx));
+
+ multi_workspace.update(cx, |mw, cx| {
+ mw.open_sidebar(cx);
});
+ // Remove one worktree.
+ let worktree_b_id = project.read_with(cx, |project, cx| {
+ project
+ .worktrees(cx)
+ .find(|wt| wt.read(cx).root_name().as_unix_str() == "root_b")
+ .unwrap()
+ .read(cx)
+ .id()
+ });
+ project.update(cx, |project, cx| {
+ project.remove_worktree(worktree_b_id, cx);
+ });
+ cx.run_until_parked();
+
+ let updated_key = project.read_with(cx, |p, cx| p.project_group_key(cx));
+ assert_ne!(
+ initial_key, updated_key,
+ "key should change after removing a worktree"
+ );
+
multi_workspace.read_with(cx, |mw, _cx| {
+ let keys: Vec<&ProjectGroupKey> = mw.project_group_keys().collect();
assert_eq!(
- mw.workspaces().len(),
+ keys.len(),
2,
- "no workspace should be added or removed"
+ "should accumulate both the original and post-removal key"
);
+ assert_eq!(*keys[0], initial_key);
+ assert_eq!(*keys[1], updated_key);
+ });
+}
+
+#[gpui::test]
+async fn test_project_group_keys_across_multiple_workspaces_and_worktree_changes(
+ cx: &mut TestAppContext,
+) {
+ init_test(cx);
+ let fs = FakeFs::new(cx.executor());
+ fs.insert_tree("/root_a", json!({ "file.txt": "" })).await;
+ fs.insert_tree("/root_b", json!({ "file.txt": "" })).await;
+ fs.insert_tree("/root_c", json!({ "file.txt": "" })).await;
+ let project_a = Project::test(fs.clone(), ["/root_a".as_ref()], cx).await;
+ let project_b = Project::test(fs.clone(), ["/root_b".as_ref()], cx).await;
+
+ let key_a = project_a.read_with(cx, |p, cx| p.project_group_key(cx));
+ let key_b = project_b.read_with(cx, |p, cx| p.project_group_key(cx));
+
+ let (multi_workspace, cx) =
+ cx.add_window_view(|window, cx| MultiWorkspace::test_new(project_a.clone(), window, cx));
+
+ multi_workspace.update(cx, |mw, cx| {
+ mw.open_sidebar(cx);
+ });
+
+ multi_workspace.update_in(cx, |mw, window, cx| {
+ mw.test_add_workspace(project_b, window, cx);
+ });
+
+ multi_workspace.read_with(cx, |mw, _cx| {
+ assert_eq!(mw.project_group_keys().count(), 2);
+ });
+
+ // Now add a worktree to project_a. This should produce a third key.
+ let (worktree, _) = project_a
+ .update(cx, |project, cx| {
+ project.find_or_create_worktree("/root_c", true, cx)
+ })
+ .await
+ .unwrap();
+ worktree
+ .read_with(cx, |tree, _| tree.as_local().unwrap().scan_complete())
+ .await;
+ cx.run_until_parked();
+
+ let key_a_updated = project_a.read_with(cx, |p, cx| p.project_group_key(cx));
+ assert_ne!(key_a, key_a_updated);
+
+ multi_workspace.read_with(cx, |mw, _cx| {
+ let keys: Vec<&ProjectGroupKey> = mw.project_group_keys().collect();
assert_eq!(
- mw.active_workspace_index(),
- 0,
- "should have switched to workspace_b"
+ keys.len(),
+ 3,
+ "should have key_a, key_b, and the updated key_a with root_c"
);
+ assert_eq!(*keys[0], key_a);
+ assert_eq!(*keys[1], key_b);
+ assert_eq!(*keys[2], key_a_updated);
});
}
@@ -1226,10 +1226,8 @@ where
let mut display = format!("{err:#}");
if !display.ends_with('\n') {
display.push('.');
- display.push(' ')
}
- let detail =
- f(err, window, cx).unwrap_or_else(|| format!("{display}Please try again."));
+ let detail = f(err, window, cx).unwrap_or(display);
window.prompt(PromptLevel::Critical, &msg, Some(&detail), &["Ok"], cx)
}) {
prompt.await.ok();
@@ -2,6 +2,7 @@ use crate::{
CloseWindow, NewFile, NewTerminal, OpenInTerminal, OpenOptions, OpenTerminal, OpenVisible,
SplitDirection, ToggleFileFinder, ToggleProjectSymbols, ToggleZoom, Workspace,
WorkspaceItemBuilder, ZoomIn, ZoomOut,
+ focus_follows_mouse::FocusFollowsMouse as _,
invalid_item_view::InvalidItemView,
item::{
ActivateOnClose, ClosePosition, Item, ItemBufferKind, ItemHandle, ItemSettings,
@@ -11,7 +12,7 @@ use crate::{
move_item,
notifications::NotifyResultExt,
toolbar::Toolbar,
- workspace_settings::{AutosaveSetting, TabBarSettings, WorkspaceSettings},
+ workspace_settings::{AutosaveSetting, FocusFollowsMouse, TabBarSettings, WorkspaceSettings},
};
use anyhow::Result;
use collections::{BTreeSet, HashMap, HashSet, VecDeque};
@@ -443,6 +444,7 @@ pub struct Pane {
pinned_tab_count: usize,
diagnostics: HashMap<ProjectPath, DiagnosticSeverity>,
zoom_out_on_close: bool,
+ focus_follows_mouse: FocusFollowsMouse,
diagnostic_summary_update: Task<()>,
/// If a certain project item wants to get recreated with specific data, it can persist its data before the recreation here.
pub project_item_restoration_data: HashMap<ProjectItemKind, Box<dyn Any + Send>>,
@@ -615,6 +617,7 @@ impl Pane {
pinned_tab_count: 0,
diagnostics: Default::default(),
zoom_out_on_close: true,
+ focus_follows_mouse: WorkspaceSettings::get_global(cx).focus_follows_mouse,
diagnostic_summary_update: Task::ready(()),
project_item_restoration_data: HashMap::default(),
welcome_page: None,
@@ -782,7 +785,6 @@ impl Pane {
fn settings_changed(&mut self, window: &mut Window, cx: &mut Context<Self>) {
let tab_bar_settings = TabBarSettings::get_global(cx);
- let new_max_tabs = WorkspaceSettings::get_global(cx).max_tabs;
if let Some(display_nav_history_buttons) = self.display_nav_history_buttons.as_mut() {
*display_nav_history_buttons = tab_bar_settings.show_nav_history_buttons;
@@ -795,6 +797,12 @@ impl Pane {
self.nav_history.0.lock().preview_item_id = None;
}
+ let workspace_settings = WorkspaceSettings::get_global(cx);
+
+ self.focus_follows_mouse = workspace_settings.focus_follows_mouse;
+
+ let new_max_tabs = workspace_settings.max_tabs;
+
if self.use_max_tabs && new_max_tabs != self.max_tabs {
self.max_tabs = new_max_tabs;
self.close_items_on_settings_change(window, cx);
@@ -3662,6 +3670,11 @@ impl Pane {
this.drag_split_direction = None;
this.handle_external_paths_drop(paths, window, cx)
}))
+ .on_click(cx.listener(move |this, event: &ClickEvent, window, cx| {
+ if event.click_count() == 2 {
+ window.dispatch_action(this.double_click_dispatch_action.boxed_clone(), cx);
+ }
+ }))
}
pub fn render_menu_overlay(menu: &Entity<ContextMenu>) -> Div {
@@ -4460,6 +4473,7 @@ impl Render for Pane {
placeholder.child(self.welcome_page.clone().unwrap())
}
}
+ .focus_follows_mouse(self.focus_follows_mouse, cx)
})
.child(
// drag target
@@ -4881,14 +4895,17 @@ impl Render for DraggedTab {
#[cfg(test)]
mod tests {
- use std::{cell::Cell, iter::zip, num::NonZero};
+ use std::{cell::Cell, iter::zip, num::NonZero, rc::Rc};
use super::*;
use crate::{
Member,
item::test::{TestItem, TestProjectItem},
};
- use gpui::{AppContext, Axis, TestAppContext, VisualTestContext, size};
+ use gpui::{
+ AppContext, Axis, Modifiers, MouseButton, MouseDownEvent, MouseMoveEvent, MouseUpEvent,
+ TestAppContext, VisualTestContext, size,
+ };
use project::FakeFs;
use settings::SettingsStore;
use theme::LoadThemes;
@@ -6613,8 +6630,6 @@ mod tests {
#[gpui::test]
async fn test_drag_tab_to_middle_tab_with_mouse_events(cx: &mut TestAppContext) {
- use gpui::{Modifiers, MouseButton, MouseDownEvent, MouseMoveEvent, MouseUpEvent};
-
init_test(cx);
let fs = FakeFs::new(cx.executor());
@@ -6666,8 +6681,6 @@ mod tests {
async fn test_drag_pinned_tab_when_show_pinned_tabs_in_separate_row_enabled(
cx: &mut TestAppContext,
) {
- use gpui::{Modifiers, MouseButton, MouseDownEvent, MouseMoveEvent, MouseUpEvent};
-
init_test(cx);
set_pinned_tabs_separate_row(cx, true);
let fs = FakeFs::new(cx.executor());
@@ -6743,8 +6756,6 @@ mod tests {
async fn test_drag_unpinned_tab_when_show_pinned_tabs_in_separate_row_enabled(
cx: &mut TestAppContext,
) {
- use gpui::{Modifiers, MouseButton, MouseDownEvent, MouseMoveEvent, MouseUpEvent};
-
init_test(cx);
set_pinned_tabs_separate_row(cx, true);
let fs = FakeFs::new(cx.executor());
@@ -6797,8 +6808,6 @@ mod tests {
async fn test_drag_mixed_tabs_when_show_pinned_tabs_in_separate_row_enabled(
cx: &mut TestAppContext,
) {
- use gpui::{Modifiers, MouseButton, MouseDownEvent, MouseMoveEvent, MouseUpEvent};
-
init_test(cx);
set_pinned_tabs_separate_row(cx, true);
let fs = FakeFs::new(cx.executor());
@@ -6864,8 +6873,6 @@ mod tests {
#[gpui::test]
async fn test_middle_click_pinned_tab_does_not_close(cx: &mut TestAppContext) {
- use gpui::{Modifiers, MouseButton, MouseDownEvent, MouseUpEvent};
-
init_test(cx);
let fs = FakeFs::new(cx.executor());
@@ -6935,6 +6942,74 @@ mod tests {
assert_item_labels(&pane, ["A*!"], cx);
}
+ #[gpui::test]
+ async fn test_double_click_pinned_tab_bar_empty_space_creates_new_tab(cx: &mut TestAppContext) {
+ init_test(cx);
+ let fs = FakeFs::new(cx.executor());
+
+ let project = Project::test(fs, None, cx).await;
+ let (workspace, cx) =
+ cx.add_window_view(|window, cx| Workspace::test_new(project.clone(), window, cx));
+ let pane = workspace.read_with(cx, |workspace, _| workspace.active_pane().clone());
+
+ // The real NewFile handler lives in editor::init, which isn't initialized
+ // in workspace tests. Register a global action handler that sets a flag so
+ // we can verify the action is dispatched without depending on the editor crate.
+ // TODO: If editor::init is ever available in workspace tests, remove this
+ // flag and assert the resulting tab bar state directly instead.
+ let new_file_dispatched = Rc::new(Cell::new(false));
+ cx.update(|_, cx| {
+ let new_file_dispatched = new_file_dispatched.clone();
+ cx.on_action(move |_: &NewFile, _cx| {
+ new_file_dispatched.set(true);
+ });
+ });
+
+ set_pinned_tabs_separate_row(cx, true);
+
+ let item_a = add_labeled_item(&pane, "A", false, cx);
+ add_labeled_item(&pane, "B", false, cx);
+
+ pane.update_in(cx, |pane, window, cx| {
+ let ix = pane
+ .index_for_item_id(item_a.item_id())
+ .expect("item A should exist");
+ pane.pin_tab_at(ix, window, cx);
+ });
+ assert_item_labels(&pane, ["A!", "B*"], cx);
+ cx.run_until_parked();
+
+ let pinned_drop_target_bounds = cx
+ .debug_bounds("pinned_tabs_border")
+ .expect("pinned_tabs_border should have debug bounds");
+
+ cx.simulate_event(MouseDownEvent {
+ position: pinned_drop_target_bounds.center(),
+ button: MouseButton::Left,
+ modifiers: Modifiers::default(),
+ click_count: 2,
+ first_mouse: false,
+ });
+
+ cx.run_until_parked();
+
+ cx.simulate_event(MouseUpEvent {
+ position: pinned_drop_target_bounds.center(),
+ button: MouseButton::Left,
+ modifiers: Modifiers::default(),
+ click_count: 2,
+ });
+
+ cx.run_until_parked();
+
+ // TODO: If editor::init is ever available in workspace tests, replace this
+ // with an assert_item_labels check that verifies a new tab is actually created.
+ assert!(
+ new_file_dispatched.get(),
+ "Double-clicking pinned tab bar empty space should dispatch the new file action"
+ );
+ }
+
#[gpui::test]
async fn test_add_item_with_new_item(cx: &mut TestAppContext) {
init_test(cx);
@@ -1,6 +1,7 @@
use crate::{
AnyActiveCall, AppState, CollaboratorId, FollowerState, Pane, ParticipantLocation, Workspace,
WorkspaceSettings,
+ notifications::DetachAndPromptErr,
pane_group::element::pane_axis,
workspace_settings::{PaneSplitDirectionHorizontal, PaneSplitDirectionVertical},
};
@@ -438,14 +439,19 @@ impl PaneLeaderDecorator for PaneRenderContext<'_> {
let app_state = self.app_state.clone();
this.cursor_pointer().on_mouse_down(
MouseButton::Left,
- move |_, _, cx| {
+ move |_, window, cx| {
crate::join_in_room_project(
leader_project_id,
leader_user_id,
app_state.clone(),
cx,
)
- .detach_and_log_err(cx);
+ .detach_and_prompt_err(
+ "Failed to join project",
+ window,
+ cx,
+ |error, _, _| Some(format!("{error:#}")),
+ );
},
)
},
@@ -337,15 +337,20 @@ pub fn read_serialized_multi_workspaces(
window_groups
.into_iter()
- .map(|group| {
+ .filter_map(|group| {
let window_id = group.first().and_then(|sw| sw.window_id);
let state = window_id
.map(|wid| read_multi_workspace_state(wid, cx))
.unwrap_or_default();
- model::SerializedMultiWorkspace {
- workspaces: group,
+ let active_workspace = state
+ .active_workspace_id
+ .and_then(|id| group.iter().position(|ws| ws.workspace_id == id))
+ .or(Some(0))
+ .and_then(|index| group.into_iter().nth(index))?;
+ Some(model::SerializedMultiWorkspace {
+ active_workspace,
state,
- }
+ })
})
.collect()
}
@@ -2488,11 +2493,20 @@ pub fn delete_unloaded_items(
#[cfg(test)]
mod tests {
use super::*;
- use crate::persistence::model::{
- SerializedItem, SerializedPane, SerializedPaneGroup, SerializedWorkspace, SessionWorkspace,
+ use crate::{
+ multi_workspace::MultiWorkspace,
+ persistence::{
+ model::{
+ SerializedItem, SerializedPane, SerializedPaneGroup, SerializedWorkspace,
+ SessionWorkspace,
+ },
+ read_multi_workspace_state,
+ },
};
- use gpui;
+ use feature_flags::FeatureFlagAppExt;
+ use gpui::AppContext as _;
use pretty_assertions::assert_eq;
+ use project::{Project, ProjectGroupKey};
use remote::SshConnectionOptions;
use serde_json::json;
use std::{thread, time::Duration};
@@ -2507,12 +2521,6 @@ mod tests {
#[gpui::test]
async fn test_multi_workspace_serializes_on_add_and_remove(cx: &mut gpui::TestAppContext) {
- use crate::multi_workspace::MultiWorkspace;
- use crate::persistence::read_multi_workspace_state;
- use feature_flags::FeatureFlagAppExt;
- use gpui::AppContext as _;
- use project::Project;
-
crate::tests::init_test(cx);
cx.update(|cx| {
@@ -2527,6 +2535,10 @@ mod tests {
let (multi_workspace, cx) =
cx.add_window_view(|window, cx| MultiWorkspace::test_new(project1.clone(), window, cx));
+ multi_workspace.update(cx, |mw, cx| {
+ mw.open_sidebar(cx);
+ });
+
multi_workspace.update_in(cx, |mw, _, cx| {
mw.set_random_database_id(cx);
});
@@ -2556,7 +2568,7 @@ mod tests {
// --- Remove the second workspace (index 1) ---
multi_workspace.update_in(cx, |mw, window, cx| {
- let ws = mw.workspaces()[1].clone();
+ let ws = mw.workspaces().nth(1).unwrap().clone();
mw.remove(&ws, window, cx);
});
@@ -3993,6 +4005,7 @@ mod tests {
window_10,
MultiWorkspaceState {
active_workspace_id: Some(WorkspaceId(2)),
+ project_group_keys: vec![],
sidebar_open: true,
sidebar_state: None,
},
@@ -4004,6 +4017,7 @@ mod tests {
window_20,
MultiWorkspaceState {
active_workspace_id: Some(WorkspaceId(3)),
+ project_group_keys: vec![],
sidebar_open: false,
sidebar_state: None,
},
@@ -4040,35 +4054,30 @@ mod tests {
let results = cx.update(|cx| read_serialized_multi_workspaces(session_workspaces, cx));
- // Should produce 3 groups: window 10, window 20, and the orphan.
+ // Should produce 3 results: window 10, window 20, and the orphan.
assert_eq!(results.len(), 3);
- // Window 10 group: 2 workspaces, active_workspace_id = 2, sidebar open.
+ // Window 10: active_workspace_id = 2 picks workspace 2 (paths /b), sidebar open.
let group_10 = &results[0];
- assert_eq!(group_10.workspaces.len(), 2);
+ assert_eq!(group_10.active_workspace.workspace_id, WorkspaceId(2));
assert_eq!(group_10.state.active_workspace_id, Some(WorkspaceId(2)));
assert_eq!(group_10.state.sidebar_open, true);
- // Window 20 group: 1 workspace, active_workspace_id = 3, sidebar closed.
+ // Window 20: active_workspace_id = 3 picks workspace 3 (paths /c), sidebar closed.
let group_20 = &results[1];
- assert_eq!(group_20.workspaces.len(), 1);
+ assert_eq!(group_20.active_workspace.workspace_id, WorkspaceId(3));
assert_eq!(group_20.state.active_workspace_id, Some(WorkspaceId(3)));
assert_eq!(group_20.state.sidebar_open, false);
- // Orphan group: no window_id, so state is default.
+ // Orphan: no active_workspace_id, falls back to first workspace (id 4).
let group_none = &results[2];
- assert_eq!(group_none.workspaces.len(), 1);
+ assert_eq!(group_none.active_workspace.workspace_id, WorkspaceId(4));
assert_eq!(group_none.state.active_workspace_id, None);
assert_eq!(group_none.state.sidebar_open, false);
}
#[gpui::test]
async fn test_flush_serialization_completes_before_quit(cx: &mut gpui::TestAppContext) {
- use crate::multi_workspace::MultiWorkspace;
- use feature_flags::FeatureFlagAppExt;
-
- use project::Project;
-
crate::tests::init_test(cx);
cx.update(|cx| {
@@ -4114,12 +4123,6 @@ mod tests {
#[gpui::test]
async fn test_create_workspace_serialization(cx: &mut gpui::TestAppContext) {
- use crate::multi_workspace::MultiWorkspace;
- use crate::persistence::read_multi_workspace_state;
- use feature_flags::FeatureFlagAppExt;
-
- use project::Project;
-
crate::tests::init_test(cx);
cx.update(|cx| {
@@ -4177,11 +4180,6 @@ mod tests {
#[gpui::test]
async fn test_remove_workspace_clears_session_binding(cx: &mut gpui::TestAppContext) {
- use crate::multi_workspace::MultiWorkspace;
- use feature_flags::FeatureFlagAppExt;
- use gpui::AppContext as _;
- use project::Project;
-
crate::tests::init_test(cx);
cx.update(|cx| {
@@ -4197,6 +4195,10 @@ mod tests {
let (multi_workspace, cx) =
cx.add_window_view(|window, cx| MultiWorkspace::test_new(project1.clone(), window, cx));
+ multi_workspace.update(cx, |mw, cx| {
+ mw.open_sidebar(cx);
+ });
+
multi_workspace.update_in(cx, |mw, _, cx| {
mw.set_random_database_id(cx);
});
@@ -4239,7 +4241,7 @@ mod tests {
// Remove workspace at index 1 (the second workspace).
multi_workspace.update_in(cx, |mw, window, cx| {
- let ws = mw.workspaces()[1].clone();
+ let ws = mw.workspaces().nth(1).unwrap().clone();
mw.remove(&ws, window, cx);
});
@@ -4269,11 +4271,6 @@ mod tests {
#[gpui::test]
async fn test_remove_workspace_not_restored_as_zombie(cx: &mut gpui::TestAppContext) {
- use crate::multi_workspace::MultiWorkspace;
- use feature_flags::FeatureFlagAppExt;
- use gpui::AppContext as _;
- use project::Project;
-
crate::tests::init_test(cx);
cx.update(|cx| {
@@ -4299,6 +4296,10 @@ mod tests {
let (multi_workspace, cx) =
cx.add_window_view(|window, cx| MultiWorkspace::test_new(project1.clone(), window, cx));
+ multi_workspace.update(cx, |mw, cx| {
+ mw.open_sidebar(cx);
+ });
+
multi_workspace.update_in(cx, |mw, _, cx| {
mw.workspace().update(cx, |ws, _cx| {
ws.set_database_id(ws1_id);
@@ -4350,7 +4351,7 @@ mod tests {
// Remove workspace2 (index 1).
multi_workspace.update_in(cx, |mw, window, cx| {
- let ws = mw.workspaces()[1].clone();
+ let ws = mw.workspaces().nth(1).unwrap().clone();
mw.remove(&ws, window, cx);
});
@@ -4376,11 +4377,6 @@ mod tests {
#[gpui::test]
async fn test_pending_removal_tasks_drained_on_flush(cx: &mut gpui::TestAppContext) {
- use crate::multi_workspace::MultiWorkspace;
- use feature_flags::FeatureFlagAppExt;
- use gpui::AppContext as _;
- use project::Project;
-
crate::tests::init_test(cx);
cx.update(|cx| {
@@ -4401,6 +4397,10 @@ mod tests {
let (multi_workspace, cx) =
cx.add_window_view(|window, cx| MultiWorkspace::test_new(project1.clone(), window, cx));
+ multi_workspace.update(cx, |mw, cx| {
+ mw.open_sidebar(cx);
+ });
+
multi_workspace.update_in(cx, |mw, _, cx| {
mw.set_random_database_id(cx);
});
@@ -4434,7 +4434,7 @@ mod tests {
// Remove workspace2 — this pushes a task to pending_removal_tasks.
multi_workspace.update_in(cx, |mw, window, cx| {
- let ws = mw.workspaces()[1].clone();
+ let ws = mw.workspaces().nth(1).unwrap().clone();
mw.remove(&ws, window, cx);
});
@@ -4443,7 +4443,6 @@ mod tests {
let all_tasks = multi_workspace.update_in(cx, |mw, window, cx| {
let mut tasks: Vec<Task<()>> = mw
.workspaces()
- .iter()
.map(|workspace| {
workspace.update(cx, |workspace, cx| {
workspace.flush_serialization(window, cx)
@@ -4482,10 +4481,6 @@ mod tests {
#[gpui::test]
async fn test_create_workspace_bounds_observer_uses_fresh_id(cx: &mut gpui::TestAppContext) {
- use crate::multi_workspace::MultiWorkspace;
- use feature_flags::FeatureFlagAppExt;
- use project::Project;
-
crate::tests::init_test(cx);
cx.update(|cx| {
@@ -4543,10 +4538,6 @@ mod tests {
#[gpui::test]
async fn test_flush_serialization_writes_bounds(cx: &mut gpui::TestAppContext) {
- use crate::multi_workspace::MultiWorkspace;
- use feature_flags::FeatureFlagAppExt;
- use project::Project;
-
crate::tests::init_test(cx);
cx.update(|cx| {
@@ -4702,4 +4693,223 @@ mod tests {
assert_eq!(result[2].2.paths(), &[PathBuf::from("/plain-project")]);
assert_eq!(result[2].0, WorkspaceId(4));
}
+
+ #[gpui::test]
+ async fn test_restore_window_with_linked_worktree_and_multiple_project_groups(
+ cx: &mut gpui::TestAppContext,
+ ) {
+ crate::tests::init_test(cx);
+
+ cx.update(|cx| {
+ cx.set_staff(true);
+ cx.update_flags(true, vec!["agent-v2".to_string()]);
+ });
+
+ let fs = fs::FakeFs::new(cx.executor());
+
+ // Main git repo at /repo
+ fs.insert_tree(
+ "/repo",
+ json!({
+ ".git": {
+ "HEAD": "ref: refs/heads/main",
+ "worktrees": {
+ "feature": {
+ "commondir": "../../",
+ "HEAD": "ref: refs/heads/feature"
+ }
+ }
+ },
+ "src": { "main.rs": "" }
+ }),
+ )
+ .await;
+
+ // Linked worktree checkout pointing back to /repo
+ fs.insert_tree(
+ "/worktree-feature",
+ json!({
+ ".git": "gitdir: /repo/.git/worktrees/feature",
+ "src": { "lib.rs": "" }
+ }),
+ )
+ .await;
+
+ // --- Phase 1: Set up the original multi-workspace window ---
+
+ let project_1 = Project::test(fs.clone(), ["/repo".as_ref()], cx).await;
+ let project_1_linked_worktree =
+ Project::test(fs.clone(), ["/worktree-feature".as_ref()], cx).await;
+
+ // Wait for git discovery to finish.
+ cx.run_until_parked();
+
+ // Create a second, unrelated project so we have two distinct project groups.
+ fs.insert_tree(
+ "/other-project",
+ json!({
+ ".git": { "HEAD": "ref: refs/heads/main" },
+ "readme.md": ""
+ }),
+ )
+ .await;
+ let project_2 = Project::test(fs.clone(), ["/other-project".as_ref()], cx).await;
+ cx.run_until_parked();
+
+ // Create the MultiWorkspace with project_2, then add the main repo
+ // and its linked worktree. The linked worktree is added last and
+ // becomes the active workspace.
+ let (multi_workspace, cx) = cx
+ .add_window_view(|window, cx| MultiWorkspace::test_new(project_2.clone(), window, cx));
+
+ multi_workspace.update(cx, |mw, cx| {
+ mw.open_sidebar(cx);
+ });
+
+ multi_workspace.update_in(cx, |mw, window, cx| {
+ mw.test_add_workspace(project_1.clone(), window, cx);
+ });
+
+ let workspace_worktree = multi_workspace.update_in(cx, |mw, window, cx| {
+ mw.test_add_workspace(project_1_linked_worktree.clone(), window, cx)
+ });
+
+ // Assign database IDs and set up session bindings so serialization
+ // writes real rows.
+ multi_workspace.update_in(cx, |mw, _, cx| {
+ for workspace in mw.workspaces() {
+ workspace.update(cx, |ws, _cx| {
+ ws.set_random_database_id();
+ });
+ }
+ });
+
+ // Flush serialization for each individual workspace (writes to SQLite)
+ // and for the MultiWorkspace (writes to KVP).
+ let tasks = multi_workspace.update_in(cx, |mw, window, cx| {
+ let session_id = mw.workspace().read(cx).session_id();
+ let window_id_u64 = window.window_handle().window_id().as_u64();
+
+ let mut tasks: Vec<Task<()>> = Vec::new();
+ for workspace in mw.workspaces() {
+ tasks.push(workspace.update(cx, |ws, cx| ws.flush_serialization(window, cx)));
+ if let Some(db_id) = workspace.read(cx).database_id() {
+ let db = WorkspaceDb::global(cx);
+ let session_id = session_id.clone();
+ tasks.push(cx.background_spawn(async move {
+ db.set_session_binding(db_id, session_id, Some(window_id_u64))
+ .await
+ .log_err();
+ }));
+ }
+ }
+ mw.serialize(cx);
+ tasks
+ });
+ cx.run_until_parked();
+ for task in tasks {
+ task.await;
+ }
+ cx.run_until_parked();
+
+ let active_db_id = workspace_worktree.read_with(cx, |ws, _| ws.database_id());
+ assert!(
+ active_db_id.is_some(),
+ "Active workspace should have a database ID"
+ );
+
+ // --- Phase 2: Read back and verify the serialized state ---
+
+ let session_id = multi_workspace
+ .read_with(cx, |mw, cx| mw.workspace().read(cx).session_id())
+ .unwrap();
+ let db = cx.update(|_, cx| WorkspaceDb::global(cx));
+ let session_workspaces = db
+ .last_session_workspace_locations(&session_id, None, fs.as_ref())
+ .await
+ .expect("should load session workspaces");
+ assert!(
+ !session_workspaces.is_empty(),
+ "Should have at least one session workspace"
+ );
+
+ let multi_workspaces =
+ cx.update(|_, cx| read_serialized_multi_workspaces(session_workspaces, cx));
+ assert_eq!(
+ multi_workspaces.len(),
+ 1,
+ "All workspaces share one window, so there should be exactly one multi-workspace"
+ );
+
+ let serialized = &multi_workspaces[0];
+ assert_eq!(
+ serialized.active_workspace.workspace_id,
+ active_db_id.unwrap(),
+ );
+ assert_eq!(serialized.state.project_group_keys.len(), 2,);
+
+ // Verify the serialized project group keys round-trip back to the
+ // originals.
+ let restored_keys: Vec<ProjectGroupKey> = serialized
+ .state
+ .project_group_keys
+ .iter()
+ .cloned()
+ .map(Into::into)
+ .collect();
+ let expected_keys = vec![
+ ProjectGroupKey::new(None, PathList::new(&["/other-project"])),
+ ProjectGroupKey::new(None, PathList::new(&["/repo"])),
+ ];
+ assert_eq!(
+ restored_keys, expected_keys,
+ "Deserialized project group keys should match the originals"
+ );
+
+ // --- Phase 3: Restore the window and verify the result ---
+
+ let app_state =
+ multi_workspace.read_with(cx, |mw, cx| mw.workspace().read(cx).app_state().clone());
+
+ let serialized_mw = multi_workspaces.into_iter().next().unwrap();
+ let restored_handle: gpui::WindowHandle<MultiWorkspace> = cx
+ .update(|_, cx| {
+ cx.spawn(async move |mut cx| {
+ crate::restore_multiworkspace(serialized_mw, app_state, &mut cx).await
+ })
+ })
+ .await
+ .expect("restore_multiworkspace should succeed");
+
+ cx.run_until_parked();
+
+ // The restored window should have the same project group keys.
+ let restored_keys: Vec<ProjectGroupKey> = restored_handle
+ .read_with(cx, |mw: &MultiWorkspace, _cx| {
+ mw.project_group_keys().cloned().collect()
+ })
+ .unwrap();
+ assert_eq!(
+ restored_keys, expected_keys,
+ "Restored window should have the same project group keys as the original"
+ );
+
+ // The active workspace in the restored window should have the linked
+ // worktree paths.
+ let active_paths: Vec<PathBuf> = restored_handle
+ .read_with(cx, |mw: &MultiWorkspace, cx| {
+ mw.workspace()
+ .read(cx)
+ .root_paths(cx)
+ .into_iter()
+ .map(|p: Arc<Path>| p.to_path_buf())
+ .collect()
+ })
+ .unwrap();
+ assert_eq!(
+ active_paths,
+ vec![PathBuf::from("/worktree-feature")],
+ "The restored active workspace should be the linked worktree project"
+ );
+ }
}
@@ -13,7 +13,7 @@ use db::sqlez::{
use gpui::{AsyncWindowContext, Entity, WeakEntity, WindowId};
use language::{Toolchain, ToolchainScope};
-use project::{Project, debugger::breakpoint_store::SourceBreakpoint};
+use project::{Project, ProjectGroupKey, debugger::breakpoint_store::SourceBreakpoint};
use remote::RemoteConnectionOptions;
use serde::{Deserialize, Serialize};
use std::{
@@ -21,7 +21,7 @@ use std::{
path::{Path, PathBuf},
sync::Arc,
};
-use util::ResultExt;
+use util::{ResultExt, path_list::SerializedPathList};
use uuid::Uuid;
#[derive(
@@ -36,7 +36,7 @@ pub(crate) enum RemoteConnectionKind {
Docker,
}
-#[derive(Debug, PartialEq, Clone)]
+#[derive(Debug, PartialEq, Clone, serde::Serialize, serde::Deserialize)]
pub enum SerializedWorkspaceLocation {
Local,
Remote(RemoteConnectionOptions),
@@ -59,21 +59,51 @@ pub struct SessionWorkspace {
pub window_id: Option<WindowId>,
}
+#[derive(Debug, Clone, serde::Serialize, serde::Deserialize)]
+pub struct SerializedProjectGroupKey {
+ pub path_list: SerializedPathList,
+ pub(crate) location: SerializedWorkspaceLocation,
+}
+
+impl From<ProjectGroupKey> for SerializedProjectGroupKey {
+ fn from(value: ProjectGroupKey) -> Self {
+ SerializedProjectGroupKey {
+ path_list: value.path_list().serialize(),
+ location: match value.host() {
+ Some(host) => SerializedWorkspaceLocation::Remote(host),
+ None => SerializedWorkspaceLocation::Local,
+ },
+ }
+ }
+}
+
+impl From<SerializedProjectGroupKey> for ProjectGroupKey {
+ fn from(value: SerializedProjectGroupKey) -> Self {
+ let path_list = PathList::deserialize(&value.path_list);
+ let host = match value.location {
+ SerializedWorkspaceLocation::Local => None,
+ SerializedWorkspaceLocation::Remote(opts) => Some(opts),
+ };
+ ProjectGroupKey::new(host, path_list)
+ }
+}
+
/// Per-window state for a MultiWorkspace, persisted to KVP.
#[derive(Debug, Clone, Default, serde::Serialize, serde::Deserialize)]
pub struct MultiWorkspaceState {
pub active_workspace_id: Option<WorkspaceId>,
pub sidebar_open: bool,
+ pub project_group_keys: Vec<SerializedProjectGroupKey>,
#[serde(default)]
pub sidebar_state: Option<String>,
}
/// The serialized state of a single MultiWorkspace window from a previous session:
-/// all workspaces that shared the window, which one was active, and whether the
-/// sidebar was open.
+/// the active workspace to restore plus window-level state (project group keys,
+/// sidebar).
#[derive(Debug, Clone)]
pub struct SerializedMultiWorkspace {
- pub workspaces: Vec<SessionWorkspace>,
+ pub active_workspace: SessionWorkspace,
pub state: MultiWorkspaceState,
}
@@ -55,6 +55,7 @@ pub struct SearchOptions {
/// Specifies whether the supports search & replace.
pub replacement: bool,
pub selection: bool,
+ pub select_all: bool,
pub find_in_results: bool,
}
@@ -78,6 +79,7 @@ pub trait SearchableItem: Item + EventEmitter<SearchEvent> {
regex: true,
replacement: true,
selection: true,
+ select_all: true,
find_in_results: false,
}
}
@@ -7,7 +7,7 @@ use std::{
};
use collections::{HashMap, HashSet};
-use gpui::{DismissEvent, EventEmitter, FocusHandle, Focusable, WeakEntity};
+use gpui::{DismissEvent, EventEmitter, FocusHandle, Focusable, ScrollHandle, WeakEntity};
use project::{
WorktreeId,
@@ -17,7 +17,8 @@ use project::{
use smallvec::SmallVec;
use theme::ActiveTheme;
use ui::{
- AlertModal, Checkbox, FluentBuilder, KeyBinding, ListBulletItem, ToggleState, prelude::*,
+ AlertModal, Checkbox, FluentBuilder, KeyBinding, ListBulletItem, ToggleState, WithScrollbar,
+ prelude::*,
};
use crate::{DismissDecision, ModalView, ToggleWorktreeSecurity};
@@ -29,6 +30,7 @@ pub struct SecurityModal {
worktree_store: WeakEntity<WorktreeStore>,
remote_host: Option<RemoteHostLocation>,
focus_handle: FocusHandle,
+ project_list_scroll_handle: ScrollHandle,
trusted: Option<bool>,
}
@@ -63,16 +65,17 @@ impl ModalView for SecurityModal {
}
impl Render for SecurityModal {
- fn render(&mut self, _window: &mut Window, cx: &mut Context<Self>) -> impl IntoElement {
+ fn render(&mut self, window: &mut Window, cx: &mut Context<Self>) -> impl IntoElement {
if self.restricted_paths.is_empty() {
self.dismiss(cx);
return v_flex().into_any_element();
}
- let header_label = if self.restricted_paths.len() == 1 {
- "Unrecognized Project"
+ let restricted_count = self.restricted_paths.len();
+ let header_label: SharedString = if restricted_count == 1 {
+ "Unrecognized Project".into()
} else {
- "Unrecognized Projects"
+ format!("Unrecognized Projects ({})", restricted_count).into()
};
let trust_label = self.build_trust_label();
@@ -102,32 +105,61 @@ impl Render for SecurityModal {
.child(Icon::new(IconName::Warning).color(Color::Warning))
.child(Label::new(header_label)),
)
- .children(self.restricted_paths.values().filter_map(|restricted_path| {
- let abs_path = if restricted_path.is_file {
- restricted_path.abs_path.parent()
- } else {
- Some(restricted_path.abs_path.as_ref())
- }?;
- let label = match &restricted_path.host {
- Some(remote_host) => match &remote_host.user_name {
- Some(user_name) => format!(
- "{} ({}@{})",
- self.shorten_path(abs_path).display(),
- user_name,
- remote_host.host_identifier
- ),
- None => format!(
- "{} ({})",
- self.shorten_path(abs_path).display(),
- remote_host.host_identifier
- ),
- },
- None => self.shorten_path(abs_path).display().to_string(),
- };
- Some(h_flex()
- .pl(IconSize::default().rems() + rems(0.5))
- .child(Label::new(label).color(Color::Muted)))
- })),
+ .child(
+ div()
+ .size_full()
+ .vertical_scrollbar_for(&self.project_list_scroll_handle, window, cx)
+ .child(
+ v_flex()
+ .id("paths_container")
+ .max_h_24()
+ .overflow_y_scroll()
+ .track_scroll(&self.project_list_scroll_handle)
+ .children(
+ self.restricted_paths.values().filter_map(
+ |restricted_path| {
+ let abs_path = if restricted_path.is_file {
+ restricted_path.abs_path.parent()
+ } else {
+ Some(restricted_path.abs_path.as_ref())
+ }?;
+ let label = match &restricted_path.host {
+ Some(remote_host) => {
+ match &remote_host.user_name {
+ Some(user_name) => format!(
+ "{} ({}@{})",
+ self.shorten_path(abs_path)
+ .display(),
+ user_name,
+ remote_host.host_identifier
+ ),
+ None => format!(
+ "{} ({})",
+ self.shorten_path(abs_path)
+ .display(),
+ remote_host.host_identifier
+ ),
+ }
+ }
+ None => self
+ .shorten_path(abs_path)
+ .display()
+ .to_string(),
+ };
+ Some(
+ h_flex()
+ .pl(
+ IconSize::default().rems() + rems(0.5),
+ )
+ .child(
+ Label::new(label).color(Color::Muted),
+ ),
+ )
+ },
+ ),
+ ),
+ ),
+ ),
)
.child(
v_flex()
@@ -219,6 +251,7 @@ impl SecurityModal {
remote_host: remote_host.map(|host| host.into()),
restricted_paths: HashMap::default(),
focus_handle: cx.focus_handle(),
+ project_list_scroll_handle: ScrollHandle::new(),
trust_parents: false,
home_dir: std::env::home_dir(),
trusted: None,
@@ -1,13 +1,14 @@
use std::process::ExitStatus;
use anyhow::Result;
+use collections::HashSet;
use gpui::{AppContext, Context, Entity, Task};
use language::Buffer;
use project::{TaskSourceKind, WorktreeId};
use remote::ConnectionState;
use task::{
DebugScenario, ResolvedTask, SaveStrategy, SharedTaskContext, SpawnInTerminal, TaskContext,
- TaskTemplate,
+ TaskHook, TaskTemplate, TaskVariables, VariableName,
};
use ui::Window;
use util::TryFutureExt;
@@ -164,6 +165,111 @@ impl Workspace {
Task::ready(None)
}
}
+
+ pub fn run_create_worktree_tasks(&mut self, window: &mut Window, cx: &mut Context<Self>) {
+ let project = self.project().clone();
+ let hooks = HashSet::from_iter([TaskHook::CreateWorktree]);
+
+ let worktree_tasks: Vec<(WorktreeId, TaskContext, Vec<TaskTemplate>)> = {
+ let project = project.read(cx);
+ let task_store = project.task_store();
+ let Some(inventory) = task_store.read(cx).task_inventory().cloned() else {
+ return;
+ };
+
+ let git_store = project.git_store().read(cx);
+
+ let mut worktree_tasks = Vec::new();
+ for worktree in project.worktrees(cx) {
+ let worktree = worktree.read(cx);
+ let worktree_id = worktree.id();
+ let worktree_abs_path = worktree.abs_path();
+
+ let templates: Vec<TaskTemplate> = inventory
+ .read(cx)
+ .templates_with_hooks(&hooks, worktree_id)
+ .into_iter()
+ .map(|(_, template)| template)
+ .collect();
+
+ if templates.is_empty() {
+ continue;
+ }
+
+ let mut task_variables = TaskVariables::default();
+ task_variables.insert(
+ VariableName::WorktreeRoot,
+ worktree_abs_path.to_string_lossy().into_owned(),
+ );
+
+ if let Some(path) = git_store.original_repo_path_for_worktree(worktree_id, cx) {
+ task_variables.insert(
+ VariableName::MainGitWorktree,
+ path.to_string_lossy().into_owned(),
+ );
+ }
+
+ let task_context = TaskContext {
+ cwd: Some(worktree_abs_path.to_path_buf()),
+ task_variables,
+ project_env: Default::default(),
+ };
+
+ worktree_tasks.push((worktree_id, task_context, templates));
+ }
+ worktree_tasks
+ };
+
+ if worktree_tasks.is_empty() {
+ return;
+ }
+
+ let task = cx.spawn_in(window, async move |workspace, cx| {
+ let mut tasks = Vec::new();
+ for (worktree_id, task_context, templates) in worktree_tasks {
+ let id_base = format!("worktree_setup_{worktree_id}");
+
+ tasks.push(cx.spawn({
+ let workspace = workspace.clone();
+ async move |cx| {
+ for task_template in templates {
+ let Some(resolved) =
+ task_template.resolve_task(&id_base, &task_context)
+ else {
+ continue;
+ };
+
+ let status = workspace.update_in(cx, |workspace, window, cx| {
+ workspace.spawn_in_terminal(resolved.resolved, window, cx)
+ })?;
+
+ if let Some(result) = status.await {
+ match result {
+ Ok(exit_status) if !exit_status.success() => {
+ log::error!(
+ "Git worktree setup task failed with status: {:?}",
+ exit_status.code()
+ );
+ break;
+ }
+ Err(error) => {
+ log::error!("Git worktree setup task error: {error:#}");
+ break;
+ }
+ _ => {}
+ }
+ }
+ }
+ anyhow::Ok(())
+ }
+ }));
+ }
+
+ futures::future::join_all(tasks).await;
+ anyhow::Ok(())
+ });
+ task.detach_and_log_err(cx);
+ }
}
#[cfg(test)]
@@ -326,7 +326,7 @@ impl WelcomePage {
self.workspace
.update(cx, |workspace, cx| {
workspace
- .open_workspace_for_paths(OpenMode::Replace, paths, window, cx)
+ .open_workspace_for_paths(OpenMode::Activate, paths, window, cx)
.detach_and_log_err(cx);
})
.log_err();
@@ -19,6 +19,7 @@ mod security_modal;
pub mod shared_screen;
use db::smol::future::yield_now;
pub use shared_screen::SharedScreen;
+pub mod focus_follows_mouse;
mod status_bar;
pub mod tasks;
mod theme_preview;
@@ -31,8 +32,8 @@ pub use crate::notifications::NotificationFrame;
pub use dock::Panel;
pub use multi_workspace::{
CloseWorkspaceSidebar, DraggedSidebar, FocusWorkspaceSidebar, MultiWorkspace,
- MultiWorkspaceEvent, NextWorkspace, PreviousWorkspace, Sidebar, SidebarEvent, SidebarHandle,
- SidebarRenderState, SidebarSide, ToggleWorkspaceSidebar, sidebar_side_context_menu,
+ MultiWorkspaceEvent, Sidebar, SidebarEvent, SidebarHandle, SidebarRenderState, SidebarSide,
+ ToggleWorkspaceSidebar, sidebar_side_context_menu,
};
pub use path_list::{PathList, SerializedPathList};
pub use toast_layer::{ToastAction, ToastLayer, ToastView};
@@ -83,15 +84,15 @@ use persistence::{SerializedWindowBounds, model::SerializedWorkspace};
pub use persistence::{
WorkspaceDb, delete_unloaded_items,
model::{
- DockStructure, ItemId, SerializedMultiWorkspace, SerializedWorkspaceLocation,
- SessionWorkspace,
+ DockStructure, ItemId, MultiWorkspaceState, SerializedMultiWorkspace,
+ SerializedWorkspaceLocation, SessionWorkspace,
},
read_serialized_multi_workspaces, resolve_worktree_workspaces,
};
use postage::stream::Stream;
use project::{
- DirectoryLister, Project, ProjectEntryId, ProjectPath, ResolvedPath, Worktree, WorktreeId,
- WorktreeSettings,
+ DirectoryLister, Project, ProjectEntryId, ProjectGroupKey, ProjectPath, ResolvedPath, Worktree,
+ WorktreeId, WorktreeSettings,
debugger::{breakpoint_store::BreakpointStoreEvent, session::ThreadStatus},
project_settings::ProjectSettings,
toolchain_store::ToolchainStoreEvent,
@@ -147,8 +148,8 @@ use util::{
};
use uuid::Uuid;
pub use workspace_settings::{
- AutosaveSetting, BottomDockLayout, RestoreOnStartupBehavior, StatusBarSettings, TabBarSettings,
- WorkspaceSettings,
+ AutosaveSetting, BottomDockLayout, FocusFollowsMouse, RestoreOnStartupBehavior,
+ StatusBarSettings, TabBarSettings, WorkspaceSettings,
};
use zed_actions::{Spawn, feedback::FileBugReport, theme::ToggleMode};
@@ -655,13 +656,25 @@ impl From<WorkspaceId> for i64 {
}
}
-fn prompt_and_open_paths(app_state: Arc<AppState>, options: PathPromptOptions, cx: &mut App) {
+fn prompt_and_open_paths(
+ app_state: Arc<AppState>,
+ options: PathPromptOptions,
+ create_new_window: bool,
+ cx: &mut App,
+) {
if let Some(workspace_window) = local_workspace_windows(cx).into_iter().next() {
workspace_window
.update(cx, |multi_workspace, window, cx| {
let workspace = multi_workspace.workspace().clone();
workspace.update(cx, |workspace, cx| {
- prompt_for_open_path_and_open(workspace, app_state, options, true, window, cx);
+ prompt_for_open_path_and_open(
+ workspace,
+ app_state,
+ options,
+ create_new_window,
+ window,
+ cx,
+ );
});
})
.ok();
@@ -672,7 +685,7 @@ fn prompt_and_open_paths(app_state: Arc<AppState>, options: PathPromptOptions, c
None,
None,
None,
- OpenMode::Replace,
+ OpenMode::Activate,
cx,
);
cx.spawn(async move |cx| {
@@ -681,7 +694,14 @@ fn prompt_and_open_paths(app_state: Arc<AppState>, options: PathPromptOptions, c
window.activate_window();
let workspace = multi_workspace.workspace().clone();
workspace.update(cx, |workspace, cx| {
- prompt_for_open_path_and_open(workspace, app_state, options, true, window, cx);
+ prompt_for_open_path_and_open(
+ workspace,
+ app_state,
+ options,
+ create_new_window,
+ window,
+ cx,
+ );
});
})?;
anyhow::Ok(())
@@ -713,7 +733,7 @@ pub fn prompt_for_open_path_and_open(
if let Some(handle) = multi_workspace_handle {
if let Some(task) = handle
.update(cx, |multi_workspace, window, cx| {
- multi_workspace.open_project(paths, OpenMode::Replace, window, cx)
+ multi_workspace.open_project(paths, OpenMode::Activate, window, cx)
})
.log_err()
{
@@ -742,7 +762,7 @@ pub fn init(app_state: Arc<AppState>, cx: &mut App) {
cx.on_action(|_: &CloseWindow, cx| Workspace::close_global(cx))
.on_action(|_: &Reload, cx| reload(cx))
- .on_action(|_: &Open, cx: &mut App| {
+ .on_action(|action: &Open, cx: &mut App| {
let app_state = AppState::global(cx);
prompt_and_open_paths(
app_state,
@@ -752,6 +772,7 @@ pub fn init(app_state: Arc<AppState>, cx: &mut App) {
multiple: true,
prompt: None,
},
+ action.create_new_window,
cx,
);
})
@@ -766,6 +787,7 @@ pub fn init(app_state: Arc<AppState>, cx: &mut App) {
multiple: true,
prompt: None,
},
+ true,
cx,
);
});
@@ -1344,6 +1366,8 @@ pub struct Workspace {
scheduled_tasks: Vec<Task<()>>,
last_open_dock_positions: Vec<DockPosition>,
removing: bool,
+ open_in_dev_container: bool,
+ _dev_container_task: Option<Task<Result<()>>>,
_panels_task: Option<Task<Result<()>>>,
sidebar_focus_handle: Option<FocusHandle>,
multi_workspace: Option<WeakEntity<MultiWorkspace>>,
@@ -1378,8 +1402,6 @@ pub enum OpenMode {
/// Add to the window's multi workspace and activate it.
#[default]
Activate,
- /// Replace the currently active workspace, and any of it's linked workspaces
- Replace,
}
impl Workspace {
@@ -1778,6 +1800,8 @@ impl Workspace {
removing: false,
sidebar_focus_handle: None,
multi_workspace,
+ open_in_dev_container: false,
+ _dev_container_task: None,
}
}
@@ -1917,9 +1941,6 @@ impl Workspace {
workspace
});
match open_mode {
- OpenMode::Replace => {
- multi_workspace.replace(workspace.clone(), &*window, cx);
- }
OpenMode::Activate => {
multi_workspace.activate(workspace.clone(), window, cx);
}
@@ -2052,6 +2073,10 @@ impl Workspace {
})
}
+ pub fn project_group_key(&self, cx: &App) -> ProjectGroupKey {
+ self.project.read(cx).project_group_key(cx)
+ }
+
pub fn weak_handle(&self) -> WeakEntity<Self> {
self.weak_self.clone()
}
@@ -2800,6 +2825,18 @@ impl Workspace {
self.debugger_provider = Some(Arc::new(provider));
}
+ pub fn set_open_in_dev_container(&mut self, value: bool) {
+ self.open_in_dev_container = value;
+ }
+
+ pub fn open_in_dev_container(&self) -> bool {
+ self.open_in_dev_container
+ }
+
+ pub fn set_dev_container_task(&mut self, task: Task<Result<()>>) {
+ self._dev_container_task = Some(task);
+ }
+
pub fn debugger_provider(&self) -> Option<Arc<dyn DebuggerProvider>> {
self.debugger_provider.clone()
}
@@ -3026,7 +3063,6 @@ impl Workspace {
self.project.read(cx).visible_worktrees(cx)
}
- #[cfg(any(test, feature = "test-support"))]
pub fn worktree_scans_complete(&self, cx: &App) -> impl Future<Output = ()> + 'static + use<> {
let futures = self
.worktrees(cx)
@@ -3394,7 +3430,7 @@ impl Workspace {
let workspace_is_empty = !is_remote && !has_worktree && !has_dirty_items;
if workspace_is_empty {
- open_mode = OpenMode::Replace;
+ open_mode = OpenMode::Activate;
}
let app_state = self.app_state.clone();
@@ -5528,7 +5564,9 @@ impl Workspace {
if let Some(project_id) = other_project_id {
let app_state = self.app_state.clone();
crate::join_in_room_project(project_id, remote_participant.user.id, app_state, cx)
- .detach_and_log_err(cx);
+ .detach_and_prompt_err("Failed to join project", window, cx, |error, _, _| {
+ Some(format!("{error:#}"))
+ });
}
}
@@ -7676,11 +7714,6 @@ impl GlobalAnyActiveCall {
}
}
-pub fn merge_conflict_notification_id() -> NotificationId {
- struct MergeConflictNotification;
- NotificationId::unique::<MergeConflictNotification>()
-}
-
/// Workspace-local view of a remote participant's location.
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
pub enum ParticipantLocation {
@@ -8604,30 +8637,32 @@ pub async fn last_session_workspace_locations(
.log_err()
}
-pub struct MultiWorkspaceRestoreResult {
- pub window_handle: WindowHandle<MultiWorkspace>,
- pub errors: Vec<anyhow::Error>,
-}
-
pub async fn restore_multiworkspace(
multi_workspace: SerializedMultiWorkspace,
app_state: Arc<AppState>,
cx: &mut AsyncApp,
-) -> anyhow::Result<MultiWorkspaceRestoreResult> {
- let SerializedMultiWorkspace { workspaces, state } = multi_workspace;
- let mut group_iter = workspaces.into_iter();
- let first = group_iter
- .next()
- .context("window group must not be empty")?;
-
- let window_handle = if first.paths.is_empty() {
- cx.update(|cx| open_workspace_by_id(first.workspace_id, app_state.clone(), None, cx))
- .await?
+) -> anyhow::Result<WindowHandle<MultiWorkspace>> {
+ let SerializedMultiWorkspace {
+ active_workspace,
+ state,
+ } = multi_workspace;
+ let MultiWorkspaceState {
+ sidebar_open,
+ project_group_keys,
+ sidebar_state,
+ ..
+ } = state;
+
+ let window_handle = if active_workspace.paths.is_empty() {
+ cx.update(|cx| {
+ open_workspace_by_id(active_workspace.workspace_id, app_state.clone(), None, cx)
+ })
+ .await?
} else {
let OpenResult { window, .. } = cx
.update(|cx| {
Workspace::new_local(
- first.paths.paths().to_vec(),
+ active_workspace.paths.paths().to_vec(),
app_state.clone(),
None,
None,
@@ -8640,65 +8675,17 @@ pub async fn restore_multiworkspace(
window
};
- let mut errors = Vec::new();
-
- for session_workspace in group_iter {
- let error = if session_workspace.paths.is_empty() {
- cx.update(|cx| {
- open_workspace_by_id(
- session_workspace.workspace_id,
- app_state.clone(),
- Some(window_handle),
- cx,
- )
- })
- .await
- .err()
- } else {
- cx.update(|cx| {
- Workspace::new_local(
- session_workspace.paths.paths().to_vec(),
- app_state.clone(),
- Some(window_handle),
- None,
- None,
- OpenMode::Add,
- cx,
- )
- })
- .await
- .err()
- };
-
- if let Some(error) = error {
- errors.push(error);
- }
- }
-
- if let Some(target_id) = state.active_workspace_id {
+ if !project_group_keys.is_empty() {
+ let restored_keys: Vec<ProjectGroupKey> =
+ project_group_keys.into_iter().map(Into::into).collect();
window_handle
- .update(cx, |multi_workspace, window, cx| {
- let target_index = multi_workspace
- .workspaces()
- .iter()
- .position(|ws| ws.read(cx).database_id() == Some(target_id));
- let index = target_index.unwrap_or(0);
- if let Some(workspace) = multi_workspace.workspaces().get(index).cloned() {
- multi_workspace.activate(workspace, window, cx);
- }
- })
- .ok();
- } else {
- window_handle
- .update(cx, |multi_workspace, window, cx| {
- if let Some(workspace) = multi_workspace.workspaces().first().cloned() {
- multi_workspace.activate(workspace, window, cx);
- }
+ .update(cx, |multi_workspace, _window, _cx| {
+ multi_workspace.restore_project_group_keys(restored_keys);
})
.ok();
}
- if state.sidebar_open {
+ if sidebar_open {
window_handle
.update(cx, |multi_workspace, _, cx| {
multi_workspace.open_sidebar(cx);
@@ -8706,8 +8693,7 @@ pub async fn restore_multiworkspace(
.ok();
}
- if let Some(sidebar_state) = &state.sidebar_state {
- let sidebar_state = sidebar_state.clone();
+ if let Some(sidebar_state) = sidebar_state {
window_handle
.update(cx, |multi_workspace, window, cx| {
if let Some(sidebar) = multi_workspace.sidebar() {
@@ -8724,10 +8710,7 @@ pub async fn restore_multiworkspace(
})
.ok();
- Ok(MultiWorkspaceRestoreResult {
- window_handle,
- errors,
- })
+ Ok(window_handle)
}
actions!(
@@ -9096,7 +9079,7 @@ pub fn workspace_windows_for_location(
};
multi_workspace.read(cx).is_ok_and(|multi_workspace| {
- multi_workspace.workspaces().iter().any(|workspace| {
+ multi_workspace.workspaces().any(|workspace| {
match workspace.read(cx).workspace_location(cx) {
WorkspaceLocation::Location(location, _) => {
match (&location, serialized_location) {
@@ -9212,6 +9195,7 @@ pub struct OpenOptions {
pub requesting_window: Option<WindowHandle<MultiWorkspace>>,
pub open_mode: OpenMode,
pub env: Option<HashMap<String, String>>,
+ pub open_in_dev_container: bool,
}
/// The result of opening a workspace via [`open_paths`], [`Workspace::new_local`],
@@ -9341,7 +9325,7 @@ pub fn open_workspace_by_id(
pub fn open_paths(
abs_paths: &[PathBuf],
app_state: Arc<AppState>,
- open_options: OpenOptions,
+ mut open_options: OpenOptions,
cx: &mut App,
) -> Task<anyhow::Result<OpenResult>> {
let abs_paths = abs_paths.to_vec();
@@ -9366,10 +9350,9 @@ pub fn open_paths(
let all_metadatas = futures::future::join_all(all_paths)
.await
.into_iter()
- .filter_map(|result| result.ok().flatten())
- .collect::<Vec<_>>();
+ .filter_map(|result| result.ok().flatten());
- if all_metadatas.iter().all(|file| !file.is_dir) {
+ if all_metadatas.into_iter().all(|file| !file.is_dir) {
cx.update(|cx| {
let windows = workspace_windows_for_location(
&SerializedWorkspaceLocation::Local,
@@ -9391,12 +9374,46 @@ pub fn open_paths(
}
}
+ // Fallback for directories: when no flag is specified and no existing
+ // workspace matched, add the directory as a new workspace in the
+ // active window's MultiWorkspace (instead of opening a new window).
+ if open_options.open_new_workspace.is_none() && existing.is_none() {
+ let target_window = cx.update(|cx| {
+ let windows = workspace_windows_for_location(
+ &SerializedWorkspaceLocation::Local,
+ cx,
+ );
+ let window = cx
+ .active_window()
+ .and_then(|window| window.downcast::<MultiWorkspace>())
+ .filter(|window| windows.contains(window))
+ .or_else(|| windows.into_iter().next());
+ window.filter(|window| {
+ window.read(cx).is_ok_and(|mw| mw.multi_workspace_enabled(cx))
+ })
+ });
+
+ if let Some(window) = target_window {
+ open_options.requesting_window = Some(window);
+ window
+ .update(cx, |multi_workspace, _, cx| {
+ multi_workspace.open_sidebar(cx);
+ })
+ .log_err();
+ }
+ }
+
+ let open_in_dev_container = open_options.open_in_dev_container;
+
let result = if let Some((existing, target_workspace)) = existing {
let open_task = existing
.update(cx, |multi_workspace, window, cx| {
window.activate_window();
multi_workspace.activate(target_workspace.clone(), window, cx);
target_workspace.update(cx, |workspace, cx| {
+ if open_in_dev_container {
+ workspace.set_open_in_dev_container(true);
+ }
workspace.open_paths(
abs_paths,
OpenOptions {
@@ -9424,6 +9441,13 @@ pub fn open_paths(
Ok(OpenResult { window: existing, workspace: target_workspace, opened_items: open_task })
} else {
+ let init = if open_in_dev_container {
+ Some(Box::new(|workspace: &mut Workspace, _window: &mut Window, _cx: &mut Context<Workspace>| {
+ workspace.set_open_in_dev_container(true);
+ }) as Box<dyn FnOnce(&mut Workspace, &mut Window, &mut Context<Workspace>) + Send>)
+ } else {
+ None
+ };
let result = cx
.update(move |cx| {
Workspace::new_local(
@@ -9431,7 +9455,7 @@ pub fn open_paths(
app_state.clone(),
open_options.requesting_window,
open_options.env,
- None,
+ init,
open_options.open_mode,
cx,
)
@@ -10717,6 +10741,12 @@ mod tests {
cx.add_window(|window, cx| MultiWorkspace::test_new(project_a.clone(), window, cx));
cx.run_until_parked();
+ multi_workspace_handle
+ .update(cx, |mw, _window, cx| {
+ mw.open_sidebar(cx);
+ })
+ .unwrap();
+
let workspace_a = multi_workspace_handle
.read_with(cx, |mw, _| mw.workspace().clone())
.unwrap();
@@ -10730,7 +10760,7 @@ mod tests {
// Activate workspace A
multi_workspace_handle
.update(cx, |mw, window, cx| {
- let workspace = mw.workspaces()[0].clone();
+ let workspace = mw.workspaces().next().unwrap().clone();
mw.activate(workspace, window, cx);
})
.unwrap();
@@ -10752,7 +10782,7 @@ mod tests {
// Verify workspace A is active
multi_workspace_handle
.read_with(cx, |mw, _| {
- assert_eq!(mw.active_workspace_index(), 0);
+ assert_eq!(mw.workspace(), &workspace_a);
})
.unwrap();
@@ -10768,8 +10798,8 @@ mod tests {
multi_workspace_handle
.read_with(cx, |mw, _| {
assert_eq!(
- mw.active_workspace_index(),
- 1,
+ mw.workspace(),
+ &workspace_b,
"workspace B should be activated when it prompts"
);
})
@@ -14487,6 +14517,12 @@ mod tests {
cx.add_window(|window, cx| MultiWorkspace::test_new(project_a.clone(), window, cx));
cx.run_until_parked();
+ multi_workspace_handle
+ .update(cx, |mw, _window, cx| {
+ mw.open_sidebar(cx);
+ })
+ .unwrap();
+
let workspace_a = multi_workspace_handle
.read_with(cx, |mw, _| mw.workspace().clone())
.unwrap();
@@ -14500,7 +14536,7 @@ mod tests {
// Switch to workspace A
multi_workspace_handle
.update(cx, |mw, window, cx| {
- let workspace = mw.workspaces()[0].clone();
+ let workspace = mw.workspaces().next().unwrap().clone();
mw.activate(workspace, window, cx);
})
.unwrap();
@@ -14546,7 +14582,7 @@ mod tests {
// Switch to workspace B
multi_workspace_handle
.update(cx, |mw, window, cx| {
- let workspace = mw.workspaces()[1].clone();
+ let workspace = mw.workspaces().nth(1).unwrap().clone();
mw.activate(workspace, window, cx);
})
.unwrap();
@@ -14555,7 +14591,7 @@ mod tests {
// Switch back to workspace A
multi_workspace_handle
.update(cx, |mw, window, cx| {
- let workspace = mw.workspaces()[0].clone();
+ let workspace = mw.workspaces().next().unwrap().clone();
mw.activate(workspace, window, cx);
})
.unwrap();
@@ -1,4 +1,4 @@
-use std::num::NonZeroUsize;
+use std::{num::NonZeroUsize, time::Duration};
use crate::DockPosition;
use collections::HashMap;
@@ -35,6 +35,13 @@ pub struct WorkspaceSettings {
pub use_system_window_tabs: bool,
pub zoomed_padding: bool,
pub window_decorations: settings::WindowDecorations,
+ pub focus_follows_mouse: FocusFollowsMouse,
+}
+
+#[derive(Copy, Clone, Deserialize)]
+pub struct FocusFollowsMouse {
+ pub enabled: bool,
+ pub debounce: Duration,
}
#[derive(Copy, Clone, PartialEq, Debug, Default)]
@@ -113,6 +120,20 @@ impl Settings for WorkspaceSettings {
use_system_window_tabs: workspace.use_system_window_tabs.unwrap(),
zoomed_padding: workspace.zoomed_padding.unwrap(),
window_decorations: workspace.window_decorations.unwrap(),
+ focus_follows_mouse: FocusFollowsMouse {
+ enabled: workspace
+ .focus_follows_mouse
+ .unwrap()
+ .enabled
+ .unwrap_or(false),
+ debounce: Duration::from_millis(
+ workspace
+ .focus_follows_mouse
+ .unwrap()
+ .debounce_ms
+ .unwrap_or(250),
+ ),
+ },
}
}
}
@@ -176,6 +176,7 @@ pub struct Snapshot {
root_char_bag: CharBag,
entries_by_path: SumTree<Entry>,
entries_by_id: SumTree<PathEntry>,
+ root_repo_common_dir: Option<Arc<SanitizedPath>>,
always_included_entries: Vec<Arc<RelPath>>,
/// A number that increases every time the worktree begins scanning
@@ -368,6 +369,7 @@ struct UpdateObservationState {
pub enum Event {
UpdatedEntries(UpdatedEntriesSet),
UpdatedGitRepositories(UpdatedGitRepositoriesSet),
+ UpdatedRootRepoCommonDir,
DeletedEntry(ProjectEntryId),
/// The worktree root itself has been deleted (for single-file worktrees)
Deleted,
@@ -407,6 +409,10 @@ impl Worktree {
None
};
+ let root_repo_common_dir = discover_root_repo_common_dir(&abs_path, fs.as_ref())
+ .await
+ .map(SanitizedPath::from_arc);
+
Ok(cx.new(move |cx: &mut Context<Worktree>| {
let mut snapshot = LocalSnapshot {
ignores_by_parent_abs_path: Default::default(),
@@ -426,6 +432,7 @@ impl Worktree {
),
root_file_handle,
};
+ snapshot.root_repo_common_dir = root_repo_common_dir;
let worktree_id = snapshot.id();
let settings_location = Some(SettingsLocation {
@@ -564,6 +571,7 @@ impl Worktree {
this.update(cx, |this, cx| {
let mut entries_changed = false;
let this = this.as_remote_mut().unwrap();
+ let old_root_repo_common_dir = this.snapshot.root_repo_common_dir.clone();
{
let mut lock = this.background_snapshot.lock();
this.snapshot = lock.0.clone();
@@ -579,6 +587,9 @@ impl Worktree {
if entries_changed {
cx.emit(Event::UpdatedEntries(Arc::default()));
}
+ if this.snapshot.root_repo_common_dir != old_root_repo_common_dir {
+ cx.emit(Event::UpdatedRootRepoCommonDir);
+ }
cx.notify();
while let Some((scan_id, _)) = this.snapshot_subscriptions.front() {
if this.observed_snapshot(*scan_id) {
@@ -1183,6 +1194,13 @@ impl LocalWorktree {
cx: &mut Context<Worktree>,
) {
let repo_changes = self.changed_repos(&self.snapshot, &mut new_snapshot);
+
+ new_snapshot.root_repo_common_dir = new_snapshot
+ .local_repo_for_work_directory_path(RelPath::empty())
+ .map(|repo| SanitizedPath::from_arc(repo.common_dir_abs_path.clone()));
+
+ let root_repo_common_dir_changed =
+ self.snapshot.root_repo_common_dir != new_snapshot.root_repo_common_dir;
self.snapshot = new_snapshot;
if let Some(share) = self.update_observer.as_mut() {
@@ -1198,6 +1216,9 @@ impl LocalWorktree {
if !repo_changes.is_empty() {
cx.emit(Event::UpdatedGitRepositories(repo_changes));
}
+ if root_repo_common_dir_changed {
+ cx.emit(Event::UpdatedRootRepoCommonDir);
+ }
while let Some((scan_id, _)) = self.snapshot_subscriptions.front() {
if self.snapshot.completed_scan_id >= *scan_id {
@@ -2216,6 +2237,7 @@ impl Snapshot {
always_included_entries: Default::default(),
entries_by_path: Default::default(),
entries_by_id: Default::default(),
+ root_repo_common_dir: None,
scan_id: 1,
completed_scan_id: 0,
}
@@ -2241,6 +2263,12 @@ impl Snapshot {
SanitizedPath::cast_arc_ref(&self.abs_path)
}
+ pub fn root_repo_common_dir(&self) -> Option<&Arc<Path>> {
+ self.root_repo_common_dir
+ .as_ref()
+ .map(SanitizedPath::cast_arc_ref)
+ }
+
fn build_initial_update(&self, project_id: u64, worktree_id: u64) -> proto::UpdateWorktree {
let mut updated_entries = self
.entries_by_path
@@ -2254,6 +2282,9 @@ impl Snapshot {
worktree_id,
abs_path: self.abs_path().to_string_lossy().into_owned(),
root_name: self.root_name().to_proto(),
+ root_repo_common_dir: self
+ .root_repo_common_dir()
+ .map(|p| p.to_string_lossy().into_owned()),
updated_entries,
removed_entries: Vec::new(),
scan_id: self.scan_id as u64,
@@ -2399,6 +2430,10 @@ impl Snapshot {
self.entries_by_path.edit(entries_by_path_edits, ());
self.entries_by_id.edit(entries_by_id_edits, ());
+ self.root_repo_common_dir = update
+ .root_repo_common_dir
+ .map(|p| SanitizedPath::new_arc(Path::new(&p)));
+
self.scan_id = update.scan_id as usize;
if update.is_last_update {
self.completed_scan_id = update.scan_id as usize;
@@ -2627,6 +2662,9 @@ impl LocalSnapshot {
worktree_id,
abs_path: self.abs_path().to_string_lossy().into_owned(),
root_name: self.root_name().to_proto(),
+ root_repo_common_dir: self
+ .root_repo_common_dir()
+ .map(|p| p.to_string_lossy().into_owned()),
updated_entries,
removed_entries,
scan_id: self.scan_id as u64,
@@ -6071,6 +6109,16 @@ fn parse_gitfile(content: &str) -> anyhow::Result<&Path> {
Ok(Path::new(path.trim()))
}
+async fn discover_root_repo_common_dir(root_abs_path: &Path, fs: &dyn Fs) -> Option<Arc<Path>> {
+ let root_dot_git = root_abs_path.join(DOT_GIT);
+ if !fs.metadata(&root_dot_git).await.is_ok_and(|m| m.is_some()) {
+ return None;
+ }
+ let dot_git_path: Arc<Path> = root_dot_git.into();
+ let (_, common_dir) = discover_git_paths(&dot_git_path, fs).await;
+ Some(common_dir)
+}
+
async fn discover_git_paths(dot_git_abs_path: &Arc<Path>, fs: &dyn Fs) -> (Arc<Path>, Arc<Path>) {
let mut repository_dir_abs_path = dot_git_abs_path.clone();
let mut common_dir_abs_path = dot_git_abs_path.clone();
@@ -2736,6 +2736,97 @@ fn check_worktree_entries(
}
}
+#[gpui::test]
+async fn test_root_repo_common_dir(executor: BackgroundExecutor, cx: &mut TestAppContext) {
+ init_test(cx);
+
+ use git::repository::Worktree as GitWorktree;
+
+ let fs = FakeFs::new(executor);
+
+ // Set up a main repo and a linked worktree pointing back to it.
+ fs.insert_tree(
+ path!("/main_repo"),
+ json!({
+ ".git": {},
+ "file.txt": "content",
+ }),
+ )
+ .await;
+ fs.add_linked_worktree_for_repo(
+ Path::new(path!("/main_repo/.git")),
+ false,
+ GitWorktree {
+ path: PathBuf::from(path!("/linked_worktree")),
+ ref_name: Some("refs/heads/feature".into()),
+ sha: "abc123".into(),
+ is_main: false,
+ },
+ )
+ .await;
+ fs.write(
+ path!("/linked_worktree/file.txt").as_ref(),
+ "content".as_bytes(),
+ )
+ .await
+ .unwrap();
+
+ let tree = Worktree::local(
+ path!("/linked_worktree").as_ref(),
+ true,
+ fs.clone(),
+ Arc::default(),
+ true,
+ WorktreeId::from_proto(0),
+ &mut cx.to_async(),
+ )
+ .await
+ .unwrap();
+ tree.update(cx, |tree, _| tree.as_local().unwrap().scan_complete())
+ .await;
+ cx.run_until_parked();
+
+ // For a linked worktree, root_repo_common_dir should point to the
+ // main repo's .git, not the worktree-specific git directory.
+ tree.read_with(cx, |tree, _| {
+ assert_eq!(
+ tree.snapshot().root_repo_common_dir().map(|p| p.as_ref()),
+ Some(Path::new(path!("/main_repo/.git"))),
+ );
+ });
+
+ let event_count: Rc<Cell<usize>> = Rc::new(Cell::new(0));
+ tree.update(cx, {
+ let event_count = event_count.clone();
+ |_, cx| {
+ cx.subscribe(&cx.entity(), move |_, _, event, _| {
+ if matches!(event, Event::UpdatedRootRepoCommonDir) {
+ event_count.set(event_count.get() + 1);
+ }
+ })
+ .detach();
+ }
+ });
+
+ // Remove .git — root_repo_common_dir should become None.
+ fs.remove_file(
+ &PathBuf::from(path!("/linked_worktree/.git")),
+ Default::default(),
+ )
+ .await
+ .unwrap();
+ tree.flush_fs_events(cx).await;
+
+ tree.read_with(cx, |tree, _| {
+ assert_eq!(tree.snapshot().root_repo_common_dir(), None);
+ });
+ assert_eq!(
+ event_count.get(),
+ 1,
+ "should have emitted UpdatedRootRepoCommonDir on removal"
+ );
+}
+
fn init_test(cx: &mut gpui::TestAppContext) {
zlog::init_test();
@@ -10,7 +10,7 @@ use agent_ui::AgentPanel;
use anyhow::{Context as _, Error, Result};
use clap::Parser;
use cli::FORCE_CLI_MODE_ENV_VAR_NAME;
-use client::{Client, ProxySettings, UserStore, parse_zed_link};
+use client::{Client, ProxySettings, RefreshLlmTokenListener, UserStore, parse_zed_link};
use collab_ui::channel_view::ChannelView;
use collections::HashMap;
use crashes::InitCrashHandler;
@@ -664,7 +664,12 @@ fn main() {
);
copilot_ui::init(&app_state, cx);
- language_model::init(app_state.user_store.clone(), app_state.client.clone(), cx);
+ language_model::init(cx);
+ RefreshLlmTokenListener::register(
+ app_state.client.clone(),
+ app_state.user_store.clone(),
+ cx,
+ );
language_models::init(app_state.user_store.clone(), app_state.client.clone(), cx);
acp_tools::init(cx);
zed::telemetry_log::init(cx);
@@ -857,13 +862,13 @@ fn main() {
diff_paths,
wsl,
diff_all: diff_all_mode,
+ dev_container: args.dev_container,
})
}
match open_rx
- .try_next()
+ .try_recv()
.ok()
- .flatten()
.and_then(|request| OpenRequest::parse(request, cx).log_err())
{
Some(request) => {
@@ -1208,6 +1213,7 @@ fn handle_open_request(request: OpenRequest, app_state: Arc<AppState>, cx: &mut
}
let mut task = None;
+ let dev_container = request.dev_container;
if !request.open_paths.is_empty() || !request.diff_paths.is_empty() {
let app_state = app_state.clone();
task = Some(cx.spawn(async move |cx| {
@@ -1218,7 +1224,10 @@ fn handle_open_request(request: OpenRequest, app_state: Arc<AppState>, cx: &mut
&request.diff_paths,
request.diff_all,
app_state,
- workspace::OpenOptions::default(),
+ workspace::OpenOptions {
+ open_in_dev_container: dev_container,
+ ..Default::default()
+ },
cx,
)
.await?;
@@ -1354,16 +1363,10 @@ pub(crate) async fn restore_or_create_workspace(
let mut tasks = Vec::new();
for multi_workspace in multi_workspaces {
- match restore_multiworkspace(multi_workspace, app_state.clone(), cx).await {
- Ok(result) => {
- for error in result.errors {
- log::error!("Failed to restore workspace in group: {error:#}");
- results.push(Err(error));
- }
- }
- Err(e) => {
- results.push(Err(e));
- }
+ if let Err(error) = restore_multiworkspace(multi_workspace, app_state.clone(), cx).await
+ {
+ log::error!("Failed to restore workspace: {error:#}");
+ results.push(Err(error));
}
}
@@ -1636,6 +1639,13 @@ struct Args {
#[arg(long, value_name = "USER@DISTRO")]
wsl: Option<String>,
+ /// Open the project in a dev container.
+ ///
+ /// Automatically triggers "Reopen in Dev Container" if a `.devcontainer/`
+ /// configuration is found in the project directory.
+ #[arg(long)]
+ dev_container: bool,
+
/// Instructs zed to run as a dev server on this machine. (not implemented)
#[arg(long)]
dev_server_token: Option<String>,
@@ -109,7 +109,7 @@ use {
image::RgbaImage,
project::{AgentId, Project},
project_panel::ProjectPanel,
- settings::{NotifyWhenAgentWaiting, Settings as _},
+ settings::{NotifyWhenAgentWaiting, PlaySoundWhenAgentDone, Settings as _},
settings_ui::SettingsWindow,
std::{
any::Any,
@@ -201,7 +201,12 @@ fn run_visual_tests(project_path: PathBuf, update_baseline: bool) -> Result<()>
});
prompt_store::init(cx);
let prompt_builder = prompt_store::PromptBuilder::load(app_state.fs.clone(), false, cx);
- language_model::init(app_state.user_store.clone(), app_state.client.clone(), cx);
+ language_model::init(cx);
+ client::RefreshLlmTokenListener::register(
+ app_state.client.clone(),
+ app_state.user_store.clone(),
+ cx,
+ );
language_models::init(app_state.user_store.clone(), app_state.client.clone(), cx);
git_ui::init(cx);
project::AgentRegistryStore::init_global(
@@ -231,7 +236,7 @@ fn run_visual_tests(project_path: PathBuf, update_baseline: bool) -> Result<()>
agent_settings::AgentSettings::override_global(
agent_settings::AgentSettings {
notify_when_agent_waiting: NotifyWhenAgentWaiting::Never,
- play_sound_when_agent_done: false,
+ play_sound_when_agent_done: PlaySoundWhenAgentDone::Never,
..agent_settings::AgentSettings::get_global(cx).clone()
},
cx,
@@ -2087,7 +2092,7 @@ fn run_agent_thread_view_test(
let mut tool_content: Vec<acp::ToolCallContent> = Vec::new();
let mut tool_locations: Vec<acp::ToolCallLocation> = Vec::new();
- while let Ok(Some(event)) = event_receiver.try_next() {
+ while let Ok(event) = event_receiver.try_recv() {
if let Ok(agent::ThreadEvent::ToolCallUpdate(acp_thread::ToolCallUpdate::UpdateFields(
update,
))) = event
@@ -2622,7 +2627,7 @@ fn run_multi_workspace_sidebar_visual_tests(
// Add worktree to workspace 1 (index 0) so it shows as "private-test-remote"
let add_worktree1_task = multi_workspace_window
.update(cx, |multi_workspace, _window, cx| {
- let workspace1 = &multi_workspace.workspaces()[0];
+ let workspace1 = multi_workspace.workspaces().next().unwrap();
let project = workspace1.read(cx).project().clone();
project.update(cx, |project, cx| {
project.find_or_create_worktree(&workspace1_dir, true, cx)
@@ -2641,7 +2646,7 @@ fn run_multi_workspace_sidebar_visual_tests(
// Add worktree to workspace 2 (index 1) so it shows as "zed"
let add_worktree2_task = multi_workspace_window
.update(cx, |multi_workspace, _window, cx| {
- let workspace2 = &multi_workspace.workspaces()[1];
+ let workspace2 = multi_workspace.workspaces().nth(1).unwrap();
let project = workspace2.read(cx).project().clone();
project.update(cx, |project, cx| {
project.find_or_create_worktree(&workspace2_dir, true, cx)
@@ -2660,7 +2665,7 @@ fn run_multi_workspace_sidebar_visual_tests(
// Switch to workspace 1 so it's highlighted as active (index 0)
multi_workspace_window
.update(cx, |multi_workspace, window, cx| {
- let workspace = multi_workspace.workspaces()[0].clone();
+ let workspace = multi_workspace.workspaces().next().unwrap().clone();
multi_workspace.activate(workspace, window, cx);
})
.context("Failed to activate workspace 1")?;
@@ -2688,7 +2693,7 @@ fn run_multi_workspace_sidebar_visual_tests(
let save_tasks = multi_workspace_window
.update(cx, |multi_workspace, _window, cx| {
let thread_store = agent::ThreadStore::global(cx);
- let workspaces = multi_workspace.workspaces().to_vec();
+ let workspaces: Vec<_> = multi_workspace.workspaces().cloned().collect();
let mut tasks = Vec::new();
for (index, workspace) in workspaces.iter().enumerate() {
@@ -3500,7 +3505,7 @@ edition = "2021"
// Add the git project as a worktree
let add_worktree_task = workspace_window
.update(cx, |multi_workspace, _window, cx| {
- let workspace = &multi_workspace.workspaces()[0];
+ let workspace = multi_workspace.workspaces().next().unwrap();
let project = workspace.read(cx).project().clone();
project.update(cx, |project, cx| {
project.find_or_create_worktree(&project_path, true, cx)
@@ -3525,7 +3530,7 @@ edition = "2021"
// Open the project panel
let (weak_workspace, async_window_cx) = workspace_window
.update(cx, |multi_workspace, window, cx| {
- let workspace = &multi_workspace.workspaces()[0];
+ let workspace = multi_workspace.workspaces().next().unwrap();
(workspace.read(cx).weak_handle(), window.to_async(cx))
})
.context("Failed to get workspace handle")?;
@@ -3539,7 +3544,7 @@ edition = "2021"
workspace_window
.update(cx, |multi_workspace, window, cx| {
- let workspace = &multi_workspace.workspaces()[0];
+ let workspace = multi_workspace.workspaces().next().unwrap();
workspace.update(cx, |workspace, cx| {
workspace.add_panel(project_panel, window, cx);
workspace.open_panel::<ProjectPanel>(window, cx);
@@ -3552,7 +3557,7 @@ edition = "2021"
// Open main.rs in the editor
let open_file_task = workspace_window
.update(cx, |multi_workspace, window, cx| {
- let workspace = &multi_workspace.workspaces()[0];
+ let workspace = multi_workspace.workspaces().next().unwrap();
workspace.update(cx, |workspace, cx| {
let worktree = workspace.project().read(cx).worktrees(cx).next();
if let Some(worktree) = worktree {
@@ -3580,7 +3585,7 @@ edition = "2021"
// Load the AgentPanel
let (weak_workspace, async_window_cx) = workspace_window
.update(cx, |multi_workspace, window, cx| {
- let workspace = &multi_workspace.workspaces()[0];
+ let workspace = multi_workspace.workspaces().next().unwrap();
(workspace.read(cx).weak_handle(), window.to_async(cx))
})
.context("Failed to get workspace handle for agent panel")?;
@@ -3624,7 +3629,7 @@ edition = "2021"
workspace_window
.update(cx, |multi_workspace, window, cx| {
- let workspace = &multi_workspace.workspaces()[0];
+ let workspace = multi_workspace.workspaces().next().unwrap();
workspace.update(cx, |workspace, cx| {
workspace.add_panel(panel.clone(), window, cx);
workspace.open_panel::<AgentPanel>(window, cx);
@@ -3801,7 +3806,7 @@ edition = "2021"
.is_none()
});
let workspace_count = workspace_window.update(cx, |multi_workspace, _window, _cx| {
- multi_workspace.workspaces().len()
+ multi_workspace.workspaces().count()
})?;
if workspace_count == 2 && status_cleared {
creation_complete = true;
@@ -3820,7 +3825,7 @@ edition = "2021"
// error state by injecting the stub server, and shrink the panel so the
// editor content is visible.
workspace_window.update(cx, |multi_workspace, window, cx| {
- let new_workspace = &multi_workspace.workspaces()[1];
+ let new_workspace = multi_workspace.workspaces().nth(1).unwrap();
new_workspace.update(cx, |workspace, cx| {
if let Some(new_panel) = workspace.panel::<AgentPanel>(cx) {
new_panel.update(cx, |panel, cx| {
@@ -3833,7 +3838,7 @@ edition = "2021"
// Type and send a message so the thread target dropdown disappears.
let new_panel = workspace_window.update(cx, |multi_workspace, _window, cx| {
- let new_workspace = &multi_workspace.workspaces()[1];
+ let new_workspace = multi_workspace.workspaces().nth(1).unwrap();
new_workspace.read(cx).panel::<AgentPanel>(cx)
})?;
if let Some(new_panel) = new_panel {
@@ -3874,7 +3879,7 @@ edition = "2021"
workspace_window
.update(cx, |multi_workspace, _window, cx| {
- let workspace = &multi_workspace.workspaces()[0];
+ let workspace = multi_workspace.workspaces().next().unwrap();
let project = workspace.read(cx).project().clone();
project.update(cx, |project, cx| {
let worktree_ids: Vec<_> =
@@ -33,10 +33,11 @@ use git_ui::commit_view::CommitViewToolbar;
use git_ui::git_panel::GitPanel;
use git_ui::project_diff::{BranchDiffToolbar, ProjectDiffToolbar};
use gpui::{
- Action, App, AppContext as _, AsyncWindowContext, Context, DismissEvent, Element, Entity,
- Focusable, KeyBinding, ParentElement, PathPromptOptions, PromptLevel, ReadGlobal, SharedString,
- Task, TitlebarOptions, UpdateGlobal, WeakEntity, Window, WindowHandle, WindowKind,
- WindowOptions, actions, image_cache, point, px, retain_all,
+ Action, App, AppContext as _, AsyncWindowContext, ClipboardItem, Context, DismissEvent,
+ Element, Entity, FocusHandle, Focusable, Image, ImageFormat, KeyBinding, ParentElement,
+ PathPromptOptions, PromptLevel, ReadGlobal, SharedString, Size, Task, TitlebarOptions,
+ UpdateGlobal, WeakEntity, Window, WindowBounds, WindowHandle, WindowKind, WindowOptions,
+ actions, image_cache, img, point, px, retain_all,
};
use image_viewer::ImageInfo;
use language::Capability;
@@ -78,7 +79,7 @@ use std::{
use terminal_view::terminal_panel::{self, TerminalPanel};
use theme::{ActiveTheme, SystemAppearance, ThemeRegistry, deserialize_icon_theme};
use theme_settings::{ThemeSettings, load_user_theme};
-use ui::{PopoverMenuHandle, prelude::*};
+use ui::{Navigable, NavigableEntry, PopoverMenuHandle, TintColor, prelude::*};
use util::markdown::MarkdownString;
use util::rel_path::RelPath;
use util::{ResultExt, asset_str, maybe};
@@ -96,8 +97,8 @@ use workspace::{
};
use workspace::{Pane, notifications::DetachAndPromptErr};
use zed_actions::{
- OpenAccountSettings, OpenBrowser, OpenDocs, OpenServerSettings, OpenSettingsFile, OpenZedUrl,
- Quit,
+ About, OpenAccountSettings, OpenBrowser, OpenDocs, OpenServerSettings, OpenSettingsFile,
+ OpenZedUrl, Quit,
};
actions!(
@@ -277,10 +278,8 @@ pub fn init(cx: &mut App) {
);
});
})
- .on_action(|_: &zed_actions::About, cx| {
- with_active_or_new_workspace(cx, |workspace, window, cx| {
- about(workspace, window, cx);
- });
+ .on_action(|_: &About, cx| {
+ open_about_window(cx);
});
}
@@ -503,12 +502,15 @@ pub fn initialize_workspace(app_state: Arc<AppState>, cx: &mut App) {
cx.new(|_| go_to_line::cursor_position::CursorPosition::new(workspace));
let line_ending_indicator =
cx.new(|_| line_ending_selector::LineEndingIndicator::default());
+ let merge_conflict_indicator =
+ cx.new(|cx| git_ui::MergeConflictIndicator::new(workspace, cx));
workspace.status_bar().update(cx, |status_bar, cx| {
status_bar.add_left_item(search_button, window, cx);
status_bar.add_left_item(lsp_button, window, cx);
status_bar.add_left_item(diagnostic_summary, window, cx);
status_bar.add_left_item(active_file_name, window, cx);
status_bar.add_left_item(activity_indicator, window, cx);
+ status_bar.add_left_item(merge_conflict_indicator, window, cx);
status_bar.add_right_item(edit_prediction_ui, window, cx);
status_bar.add_right_item(active_buffer_encoding, window, cx);
status_bar.add_right_item(active_buffer_language, window, cx);
@@ -1249,44 +1251,218 @@ fn initialize_pane(
});
}
-fn about(_: &mut Workspace, window: &mut Window, cx: &mut Context<Workspace>) {
- use std::fmt::Write;
- let release_channel = ReleaseChannel::global(cx).display_name();
- let full_version = AppVersion::global(cx);
- let version = env!("CARGO_PKG_VERSION");
- let debug = if cfg!(debug_assertions) {
- "(debug)"
- } else {
- ""
- };
- let message = format!("{release_channel} {version} {debug}");
+fn open_about_window(cx: &mut App) {
+ fn about_window_icon(release_channel: ReleaseChannel) -> Arc<Image> {
+ let bytes = match release_channel {
+ ReleaseChannel::Dev => include_bytes!("../resources/app-icon-dev.png").as_slice(),
+ ReleaseChannel::Nightly => {
+ include_bytes!("../resources/app-icon-nightly.png").as_slice()
+ }
+ ReleaseChannel::Preview => {
+ include_bytes!("../resources/app-icon-preview.png").as_slice()
+ }
+ ReleaseChannel::Stable => include_bytes!("../resources/app-icon.png").as_slice(),
+ };
- let mut detail = AppCommitSha::try_global(cx)
- .map(|sha| sha.full())
- .unwrap_or_default();
- if !detail.is_empty() {
- detail.push('\n');
+ Arc::new(Image::from_bytes(ImageFormat::Png, bytes.to_vec()))
}
- _ = write!(&mut detail, "\n{full_version}");
- let detail = Some(detail);
+ struct AboutWindow {
+ focus_handle: FocusHandle,
+ ok_entry: NavigableEntry,
+ copy_entry: NavigableEntry,
+ app_icon: Arc<Image>,
+ message: SharedString,
+ commit: Option<SharedString>,
+ full_version: SharedString,
+ }
- let prompt = window.prompt(
- PromptLevel::Info,
- &message,
- detail.as_deref(),
- &["Copy", "OK"],
- cx,
- );
- cx.spawn(async move |_, cx| {
- if let Ok(0) = prompt.await {
- let content = format!("{}\n{}", message, detail.as_deref().unwrap_or(""));
- cx.update(|cx| {
- cx.write_to_clipboard(gpui::ClipboardItem::new_string(content));
- });
+ impl AboutWindow {
+ fn new(cx: &mut Context<Self>) -> Self {
+ let release_channel = ReleaseChannel::global(cx);
+ let release_channel_name = release_channel.display_name();
+ let full_version: SharedString = AppVersion::global(cx).to_string().into();
+ let version = env!("CARGO_PKG_VERSION");
+
+ let debug = if cfg!(debug_assertions) {
+ "(debug)"
+ } else {
+ ""
+ };
+ let message: SharedString = format!("{release_channel_name} {version} {debug}").into();
+ let commit = AppCommitSha::try_global(cx)
+ .map(|sha| sha.full())
+ .filter(|commit| !commit.is_empty())
+ .map(SharedString::from);
+
+ Self {
+ focus_handle: cx.focus_handle(),
+ ok_entry: NavigableEntry::focusable(cx),
+ copy_entry: NavigableEntry::focusable(cx),
+ app_icon: about_window_icon(release_channel),
+ message,
+ commit,
+ full_version,
+ }
}
- })
- .detach();
+
+ fn copy_details(&self, window: &mut Window, cx: &mut Context<Self>) {
+ let content = match self.commit.as_ref() {
+ Some(commit) => {
+ format!(
+ "{}\nCommit: {}\nVersion: {}",
+ self.message, commit, self.full_version
+ )
+ }
+ None => format!("{}\nVersion: {}", self.message, self.full_version),
+ };
+ cx.write_to_clipboard(ClipboardItem::new_string(content));
+ window.remove_window();
+ }
+ }
+
+ impl Render for AboutWindow {
+ fn render(&mut self, window: &mut Window, cx: &mut Context<Self>) -> impl IntoElement {
+ let ok_is_focused = self.ok_entry.focus_handle.contains_focused(window, cx);
+ let copy_is_focused = self.copy_entry.focus_handle.contains_focused(window, cx);
+
+ Navigable::new(
+ v_flex()
+ .id("about-window")
+ .track_focus(&self.focus_handle)
+ .on_action(cx.listener(|_, _: &menu::Cancel, window, _cx| {
+ window.remove_window();
+ }))
+ .min_w_0()
+ .size_full()
+ .bg(cx.theme().colors().editor_background)
+ .text_color(cx.theme().colors().text)
+ .p_4()
+ .when(cfg!(target_os = "macos"), |this| this.pt_10())
+ .gap_4()
+ .text_center()
+ .justify_between()
+ .child(
+ v_flex()
+ .w_full()
+ .gap_2()
+ .items_center()
+ .child(img(self.app_icon.clone()).size_16().flex_none())
+ .child(Headline::new(self.message.clone()))
+ .when_some(self.commit.clone(), |this, commit| {
+ this.child(
+ Label::new("Commit")
+ .color(Color::Muted)
+ .size(LabelSize::XSmall),
+ )
+ .child(Label::new(commit).size(LabelSize::Small))
+ })
+ .child(
+ Label::new("Version")
+ .color(Color::Muted)
+ .size(LabelSize::XSmall),
+ )
+ .child(Label::new(self.full_version.clone()).size(LabelSize::Small)),
+ )
+ .child(
+ h_flex()
+ .w_full()
+ .gap_1()
+ .child(
+ div()
+ .flex_1()
+ .track_focus(&self.ok_entry.focus_handle)
+ .on_action(cx.listener(|_, _: &menu::Confirm, window, _cx| {
+ window.remove_window();
+ }))
+ .child(
+ Button::new("ok", "Ok")
+ .full_width()
+ .style(ButtonStyle::OutlinedGhost)
+ .toggle_state(ok_is_focused)
+ .selected_style(ButtonStyle::Tinted(TintColor::Accent))
+ .on_click(cx.listener(|_, _, window, _cx| {
+ window.remove_window();
+ })),
+ ),
+ )
+ .child(
+ div()
+ .flex_1()
+ .track_focus(&self.copy_entry.focus_handle)
+ .on_action(cx.listener(
+ |this, _: &menu::Confirm, window, cx| {
+ this.copy_details(window, cx);
+ },
+ ))
+ .child(
+ Button::new("copy", "Copy")
+ .full_width()
+ .style(ButtonStyle::Tinted(TintColor::Accent))
+ .toggle_state(copy_is_focused)
+ .selected_style(ButtonStyle::Tinted(TintColor::Accent))
+ .on_click(cx.listener(|this, _event, window, cx| {
+ this.copy_details(window, cx);
+ })),
+ ),
+ ),
+ )
+ .into_any_element(),
+ )
+ .entry(self.ok_entry.clone())
+ .entry(self.copy_entry.clone())
+ }
+ }
+
+ impl Focusable for AboutWindow {
+ fn focus_handle(&self, _cx: &App) -> FocusHandle {
+ self.ok_entry.focus_handle.clone()
+ }
+ }
+
+ // Don't open about window twice
+ if let Some(existing) = cx
+ .windows()
+ .into_iter()
+ .find_map(|w| w.downcast::<AboutWindow>())
+ {
+ existing
+ .update(cx, |about_window, window, cx| {
+ window.activate_window();
+ about_window.ok_entry.focus_handle.focus(window, cx);
+ })
+ .log_err();
+ return;
+ }
+
+ let window_size = Size {
+ width: px(440.),
+ height: px(300.),
+ };
+
+ cx.open_window(
+ WindowOptions {
+ titlebar: Some(TitlebarOptions {
+ title: Some("About Zed".into()),
+ appears_transparent: true,
+ traffic_light_position: Some(point(px(12.), px(12.))),
+ }),
+ window_bounds: Some(WindowBounds::centered(window_size, cx)),
+ is_resizable: false,
+ is_minimizable: false,
+ kind: WindowKind::Normal,
+ app_id: Some(ReleaseChannel::global(cx).app_id().to_owned()),
+ ..Default::default()
+ },
+ |window, cx| {
+ let about_window = cx.new(AboutWindow::new);
+ let focus_handle = about_window.read(cx).ok_entry.focus_handle.clone();
+ window.activate_window();
+ focus_handle.focus(window, cx);
+ about_window
+ },
+ )
+ .log_err();
}
#[cfg(not(target_os = "windows"))]
@@ -1348,7 +1524,7 @@ fn quit(_: &Quit, cx: &mut App) {
let window = *window;
let workspaces = window
.update(cx, |multi_workspace, _, _| {
- multi_workspace.workspaces().to_vec()
+ multi_workspace.workspaces().cloned().collect::<Vec<_>>()
})
.log_err();
@@ -2282,7 +2458,6 @@ mod tests {
.update(cx, |multi_workspace, window, cx| {
let mut tasks = multi_workspace
.workspaces()
- .iter()
.map(|workspace| {
workspace.update(cx, |workspace, cx| {
workspace.flush_serialization(window, cx)
@@ -2430,18 +2605,33 @@ mod tests {
})
.await
.unwrap();
- assert_eq!(cx.read(|cx| cx.windows().len()), 2);
-
- // Replace existing windows
- let window = cx
- .update(|cx| cx.windows()[0].downcast::<MultiWorkspace>())
+ assert_eq!(cx.read(|cx| cx.windows().len()), 1);
+ cx.run_until_parked();
+ multi_workspace_1
+ .update(cx, |multi_workspace, _window, cx| {
+ assert_eq!(multi_workspace.workspaces().count(), 2);
+ assert!(multi_workspace.sidebar_open());
+ let workspace = multi_workspace.workspace().read(cx);
+ assert_eq!(
+ workspace
+ .worktrees(cx)
+ .map(|w| w.read(cx).abs_path())
+ .collect::<Vec<_>>(),
+ &[
+ Path::new(path!("/root/c")).into(),
+ Path::new(path!("/root/d")).into(),
+ ]
+ );
+ })
.unwrap();
+
+ // Opening with -n (open_new_workspace: Some(true)) still creates a new window.
cx.update(|cx| {
open_paths(
&[PathBuf::from(path!("/root/e"))],
app_state,
workspace::OpenOptions {
- requesting_window: Some(window),
+ open_new_workspace: Some(true),
..Default::default()
},
cx,
@@ -2451,23 +2641,6 @@ mod tests {
.unwrap();
cx.background_executor.run_until_parked();
assert_eq!(cx.read(|cx| cx.windows().len()), 2);
- let multi_workspace_1 = cx
- .update(|cx| cx.windows()[0].downcast::<MultiWorkspace>())
- .unwrap();
- multi_workspace_1
- .update(cx, |multi_workspace, window, cx| {
- let workspace = multi_workspace.workspace().read(cx);
- assert_eq!(
- workspace
- .worktrees(cx)
- .map(|w| w.read(cx).abs_path())
- .collect::<Vec<_>>(),
- &[Path::new(path!("/root/e")).into()]
- );
- assert!(workspace.right_dock().read(cx).is_open());
- assert!(workspace.active_pane().focus_handle(cx).is_focused(window));
- })
- .unwrap();
}
#[gpui::test]
@@ -2548,7 +2721,6 @@ mod tests {
.await
.unwrap();
assert_eq!(cx.update(|cx| cx.windows().len()), 1);
- let window1 = cx.update(|cx| cx.active_window().unwrap());
cx.update(|cx| {
open_paths(
@@ -2562,6 +2734,8 @@ mod tests {
.unwrap();
assert_eq!(cx.update(|cx| cx.windows().len()), 1);
+ // Opening a directory with default options adds to the existing window
+ // rather than creating a new one.
cx.update(|cx| {
open_paths(
&[PathBuf::from(path!("/root/dir2"))],
@@ -2572,25 +2746,23 @@ mod tests {
})
.await
.unwrap();
- assert_eq!(cx.update(|cx| cx.windows().len()), 2);
- let window2 = cx.update(|cx| cx.active_window().unwrap());
- assert!(window1 != window2);
- cx.update_window(window1, |_, window, _| window.activate_window())
- .unwrap();
+ assert_eq!(cx.update(|cx| cx.windows().len()), 1);
+ // Opening a directory with -n creates a new window.
cx.update(|cx| {
open_paths(
- &[PathBuf::from(path!("/root/dir2/c"))],
+ &[PathBuf::from(path!("/root/dir2"))],
app_state.clone(),
- workspace::OpenOptions::default(),
+ workspace::OpenOptions {
+ open_new_workspace: Some(true),
+ ..Default::default()
+ },
cx,
)
})
.await
.unwrap();
assert_eq!(cx.update(|cx| cx.windows().len()), 2);
- // should have opened in window2 because that has dir2 visibly open (window1 has it open, but not in the project panel)
- assert!(cx.update(|cx| cx.active_window().unwrap()) == window2);
}
#[gpui::test]
@@ -4994,6 +5166,7 @@ mod tests {
app_state.languages.add(markdown_lang());
gpui_tokio::init(cx);
+ AppState::set_global(app_state.clone(), cx);
theme_settings::init(theme::LoadThemes::JustBase, cx);
audio::init(cx);
channel::init(&app_state.client, app_state.user_store.clone(), cx);
@@ -5015,7 +5188,12 @@ mod tests {
cx,
);
image_viewer::init(cx);
- language_model::init(app_state.user_store.clone(), app_state.client.clone(), cx);
+ language_model::init(cx);
+ client::RefreshLlmTokenListener::register(
+ app_state.client.clone(),
+ app_state.user_store.clone(),
+ cx,
+ );
language_models::init(app_state.user_store.clone(), app_state.client.clone(), cx);
web_search::init(cx);
git_graph::init(cx);
@@ -5333,6 +5511,11 @@ mod tests {
let project = project1.clone();
|window, cx| MultiWorkspace::test_new(project, window, cx)
});
+ window
+ .update(cx, |multi_workspace, _, cx| {
+ multi_workspace.open_sidebar(cx);
+ })
+ .unwrap();
cx.run_until_parked();
assert_eq!(cx.windows().len(), 1, "Should start with 1 window");
@@ -5355,7 +5538,7 @@ mod tests {
let workspace1 = window
.read_with(cx, |multi_workspace, _| {
- multi_workspace.workspaces()[0].clone()
+ multi_workspace.workspaces().next().unwrap().clone()
})
.unwrap();
@@ -5364,8 +5547,8 @@ mod tests {
multi_workspace.activate(workspace2.clone(), window, cx);
multi_workspace.activate(workspace3.clone(), window, cx);
// Switch back to workspace1 for test setup
- multi_workspace.activate(workspace1, window, cx);
- assert_eq!(multi_workspace.active_workspace_index(), 0);
+ multi_workspace.activate(workspace1.clone(), window, cx);
+ assert_eq!(multi_workspace.workspace(), &workspace1);
})
.unwrap();
@@ -5374,8 +5557,8 @@ mod tests {
// Verify setup: 3 workspaces, workspace 0 active, still 1 window
window
.read_with(cx, |multi_workspace, _| {
- assert_eq!(multi_workspace.workspaces().len(), 3);
- assert_eq!(multi_workspace.active_workspace_index(), 0);
+ assert_eq!(multi_workspace.workspaces().count(), 3);
+ assert_eq!(multi_workspace.workspace(), &workspace1);
})
.unwrap();
assert_eq!(cx.windows().len(), 1);
@@ -5398,8 +5581,8 @@ mod tests {
window
.read_with(cx, |multi_workspace, cx| {
assert_eq!(
- multi_workspace.active_workspace_index(),
- 2,
+ multi_workspace.workspace(),
+ &workspace3,
"Should have switched to workspace 3 which contains /dir3"
);
let active_item = multi_workspace
@@ -5432,8 +5615,8 @@ mod tests {
window
.read_with(cx, |multi_workspace, cx| {
assert_eq!(
- multi_workspace.active_workspace_index(),
- 1,
+ multi_workspace.workspace(),
+ &workspace2,
"Should have switched to workspace 2 which contains /dir2"
);
let active_item = multi_workspace
@@ -5481,8 +5664,8 @@ mod tests {
window
.read_with(cx, |multi_workspace, cx| {
assert_eq!(
- multi_workspace.active_workspace_index(),
- 0,
+ multi_workspace.workspace(),
+ &workspace1,
"Should have switched back to workspace 0 which contains /dir1"
);
let active_item = multi_workspace
@@ -5532,6 +5715,11 @@ mod tests {
let project = project1.clone();
|window, cx| MultiWorkspace::test_new(project, window, cx)
});
+ window1
+ .update(cx, |multi_workspace, _, cx| {
+ multi_workspace.open_sidebar(cx);
+ })
+ .unwrap();
cx.run_until_parked();
@@ -5558,6 +5746,11 @@ mod tests {
let project = project3.clone();
|window, cx| MultiWorkspace::test_new(project, window, cx)
});
+ window2
+ .update(cx, |multi_workspace, _, cx| {
+ multi_workspace.open_sidebar(cx);
+ })
+ .unwrap();
cx.run_until_parked();
assert_eq!(cx.windows().len(), 2);
@@ -5592,7 +5785,7 @@ mod tests {
// Verify workspace1_1 is active
window1
.read_with(cx, |multi_workspace, _| {
- assert_eq!(multi_workspace.active_workspace_index(), 0);
+ assert_eq!(multi_workspace.workspace(), &workspace1_1);
})
.unwrap();
@@ -5658,7 +5851,7 @@ mod tests {
// Verify workspace1_1 is still active (not workspace1_2 with dirty item)
window1
.read_with(cx, |multi_workspace, _| {
- assert_eq!(multi_workspace.active_workspace_index(), 0);
+ assert_eq!(multi_workspace.workspace(), &workspace1_1);
})
.unwrap();
@@ -5669,8 +5862,8 @@ mod tests {
window1
.read_with(cx, |multi_workspace, _| {
assert_eq!(
- multi_workspace.active_workspace_index(),
- 1,
+ multi_workspace.workspace(),
+ &workspace1_2,
"Case 2: Non-active workspace should be activated when it has dirty item"
);
})
@@ -5778,7 +5971,9 @@ mod tests {
#[gpui::test]
async fn test_multi_workspace_session_restore(cx: &mut TestAppContext) {
use collections::HashMap;
+ use project::ProjectGroupKey;
use session::Session;
+ use util::path_list::PathList;
use workspace::{OpenMode, Workspace, WorkspaceId};
let app_state = init_test(cx);
@@ -5821,6 +6016,12 @@ mod tests {
.await
.expect("failed to open first workspace");
+ window_a
+ .update(cx, |multi_workspace, _, cx| {
+ multi_workspace.open_sidebar(cx);
+ })
+ .unwrap();
+
window_a
.update(cx, |multi_workspace, window, cx| {
multi_workspace.open_project(vec![dir2.into()], OpenMode::Activate, window, cx)
@@ -5847,13 +6048,19 @@ mod tests {
.await
.expect("failed to open third workspace");
+ window_b
+ .update(cx, |multi_workspace, _, cx| {
+ multi_workspace.open_sidebar(cx);
+ })
+ .unwrap();
+
// Currently dir2 is active because it was added last.
// So, switch window_a's active workspace to dir1 (index 0).
// This sets up a non-trivial assertion: after restore, dir1 should
// still be active rather than whichever workspace happened to restore last.
window_a
.update(cx, |multi_workspace, window, cx| {
- let workspace = multi_workspace.workspaces()[0].clone();
+ let workspace = multi_workspace.workspaces().next().unwrap().clone();
multi_workspace.activate(workspace, window, cx);
})
.unwrap();
@@ -5938,94 +6145,50 @@ mod tests {
.filter_map(|window| window.downcast::<MultiWorkspace>())
.collect()
});
+ assert_eq!(restored_windows.len(), 2,);
+
+ // Identify restored windows by their active workspace root paths.
+ let (restored_a, restored_b) = {
+ let (mut with_dir1, mut with_dir3) = (None, None);
+ for window in &restored_windows {
+ let active_paths = window
+ .read_with(cx, |mw, cx| mw.workspace().read(cx).root_paths(cx))
+ .unwrap();
+ if active_paths.iter().any(|p| p.as_ref() == Path::new(dir1)) {
+ with_dir1 = Some(window);
+ } else {
+ with_dir3 = Some(window);
+ }
+ }
+ (
+ with_dir1.expect("expected a window with dir1 active"),
+ with_dir3.expect("expected a window with dir3 active"),
+ )
+ };
- assert_eq!(
- restored_windows.len(),
- 2,
- "expected 2 restored windows, got {}",
- restored_windows.len()
- );
-
- let workspace_counts: Vec<usize> = restored_windows
- .iter()
- .map(|window| {
- window
- .read_with(cx, |multi_workspace, _| multi_workspace.workspaces().len())
- .unwrap()
- })
- .collect();
- let mut sorted_counts = workspace_counts.clone();
- sorted_counts.sort();
- assert_eq!(
- sorted_counts,
- vec![1, 2],
- "expected one window with 1 workspace and one with 2, got {workspace_counts:?}"
- );
-
- let dir1_path: Arc<Path> = Path::new(dir1).into();
- let dir2_path: Arc<Path> = Path::new(dir2).into();
- let dir3_path: Arc<Path> = Path::new(dir3).into();
-
- let all_restored_paths: Vec<Vec<Vec<Arc<Path>>>> = restored_windows
- .iter()
- .map(|window| {
- window
- .read_with(cx, |multi_workspace, cx| {
- multi_workspace
- .workspaces()
- .iter()
- .map(|ws| ws.read(cx).root_paths(cx))
- .collect()
- })
- .unwrap()
+ // Window A (dir1+dir2): 1 workspace restored, but 2 project group keys.
+ restored_a
+ .read_with(cx, |mw, _| {
+ assert_eq!(
+ mw.project_group_keys().cloned().collect::<Vec<_>>(),
+ vec![
+ ProjectGroupKey::new(None, PathList::new(&[dir1])),
+ ProjectGroupKey::new(None, PathList::new(&[dir2])),
+ ]
+ );
+ assert_eq!(mw.workspaces().count(), 1);
})
- .collect();
-
- let two_ws_window = all_restored_paths
- .iter()
- .find(|paths| paths.len() == 2)
- .expect("expected a window with 2 workspaces");
- assert!(
- two_ws_window.iter().any(|p| p.contains(&dir1_path)),
- "2-workspace window should contain dir1, got {two_ws_window:?}"
- );
- assert!(
- two_ws_window.iter().any(|p| p.contains(&dir2_path)),
- "2-workspace window should contain dir2, got {two_ws_window:?}"
- );
-
- let one_ws_window = all_restored_paths
- .iter()
- .find(|paths| paths.len() == 1)
- .expect("expected a window with 1 workspace");
- assert!(
- one_ws_window[0].contains(&dir3_path),
- "1-workspace window should contain dir3, got {one_ws_window:?}"
- );
-
- // --- Verify the active workspace is preserved ---
- for window in &restored_windows {
- let (active_paths, workspace_count) = window
- .read_with(cx, |multi_workspace, cx| {
- let active = multi_workspace.workspace();
- (
- active.read(cx).root_paths(cx),
- multi_workspace.workspaces().len(),
- )
- })
- .unwrap();
+ .unwrap();
- if workspace_count == 2 {
- assert!(
- active_paths.contains(&dir1_path),
- "2-workspace window should have dir1 active, got {active_paths:?}"
- );
- } else {
- assert!(
- active_paths.contains(&dir3_path),
- "1-workspace window should have dir3 active, got {active_paths:?}"
+ // Window B (dir3): 1 workspace, 1 project group key.
+ restored_b
+ .read_with(cx, |mw, _| {
+ assert_eq!(
+ mw.project_group_keys().cloned().collect::<Vec<_>>(),
+ vec![ProjectGroupKey::new(None, PathList::new(&[dir3]))]
);
- }
- }
+ assert_eq!(mw.workspaces().count(), 1);
+ })
+ .unwrap();
}
}
@@ -313,7 +313,12 @@ mod tests {
let app_state = cx.update(|cx| {
let app_state = AppState::test(cx);
client::init(&app_state.client, cx);
- language_model::init(app_state.user_store.clone(), app_state.client.clone(), cx);
+ language_model::init(cx);
+ client::RefreshLlmTokenListener::register(
+ app_state.client.clone(),
+ app_state.user_store.clone(),
+ cx,
+ );
editor::init(cx);
app_state
});
@@ -37,6 +37,7 @@ pub struct OpenRequest {
pub open_paths: Vec<String>,
pub diff_paths: Vec<[String; 2]>,
pub diff_all: bool,
+ pub dev_container: bool,
pub open_channel_notes: Vec<(u64, Option<String>)>,
pub join_channel: Option<u64>,
pub remote_connection: Option<RemoteConnectionOptions>,
@@ -78,6 +79,7 @@ impl OpenRequest {
this.diff_paths = request.diff_paths;
this.diff_all = request.diff_all;
+ this.dev_container = request.dev_container;
if let Some(wsl) = request.wsl {
let (user, distro_name) = if let Some((user, distro)) = wsl.split_once('@') {
if user.is_empty() {
@@ -256,6 +258,7 @@ pub struct RawOpenRequest {
pub urls: Vec<String>,
pub diff_paths: Vec<[String; 2]>,
pub diff_all: bool,
+ pub dev_container: bool,
pub wsl: Option<String>,
}
@@ -413,6 +416,7 @@ pub async fn handle_cli_connection(
reuse,
env,
user_data_dir: _,
+ dev_container,
} => {
if !urls.is_empty() {
cx.update(|cx| {
@@ -421,6 +425,7 @@ pub async fn handle_cli_connection(
urls,
diff_paths,
diff_all,
+ dev_container,
wsl,
},
cx,
@@ -450,6 +455,7 @@ pub async fn handle_cli_connection(
reuse,
&responses,
wait,
+ dev_container,
app_state.clone(),
env,
cx,
@@ -471,6 +477,7 @@ async fn open_workspaces(
reuse: bool,
responses: &IpcSender<CliResponse>,
wait: bool,
+ dev_container: bool,
app_state: Arc<AppState>,
env: Option<collections::HashMap<String, String>>,
cx: &mut AsyncApp,
@@ -532,6 +539,7 @@ async fn open_workspaces(
requesting_window: replace_window,
wait,
env: env.clone(),
+ open_in_dev_container: dev_container,
..Default::default()
};
@@ -1545,4 +1553,123 @@ mod tests {
})
.unwrap();
}
+
+ #[gpui::test]
+ async fn test_dev_container_flag_opens_modal(cx: &mut TestAppContext) {
+ let app_state = init_test(cx);
+ cx.update(|cx| recent_projects::init(cx));
+
+ app_state
+ .fs
+ .as_fake()
+ .insert_tree(
+ path!("/project"),
+ json!({
+ ".devcontainer": {
+ "devcontainer.json": "{}"
+ },
+ "src": {
+ "main.rs": "fn main() {}"
+ }
+ }),
+ )
+ .await;
+
+ let (response_tx, _) = ipc::channel::<CliResponse>().unwrap();
+ let errored = cx
+ .spawn({
+ let app_state = app_state.clone();
+ |mut cx| async move {
+ open_local_workspace(
+ vec![path!("/project").to_owned()],
+ vec![],
+ false,
+ workspace::OpenOptions {
+ open_in_dev_container: true,
+ ..Default::default()
+ },
+ &response_tx,
+ &app_state,
+ &mut cx,
+ )
+ .await
+ }
+ })
+ .await;
+
+ assert!(!errored);
+
+ let multi_workspace = cx.update(|cx| cx.windows()[0].downcast::<MultiWorkspace>().unwrap());
+ multi_workspace
+ .update(cx, |multi_workspace, _, cx| {
+ let flag = multi_workspace.workspace().read(cx).open_in_dev_container();
+ assert!(
+ !flag,
+ "open_in_dev_container flag should be consumed by suggest_on_worktree_updated"
+ );
+ })
+ .unwrap();
+ }
+
+ #[gpui::test]
+ async fn test_dev_container_flag_cleared_without_config(cx: &mut TestAppContext) {
+ let app_state = init_test(cx);
+ cx.update(|cx| recent_projects::init(cx));
+
+ app_state
+ .fs
+ .as_fake()
+ .insert_tree(
+ path!("/project"),
+ json!({
+ "src": {
+ "main.rs": "fn main() {}"
+ }
+ }),
+ )
+ .await;
+
+ let (response_tx, _) = ipc::channel::<CliResponse>().unwrap();
+ let errored = cx
+ .spawn({
+ let app_state = app_state.clone();
+ |mut cx| async move {
+ open_local_workspace(
+ vec![path!("/project").to_owned()],
+ vec![],
+ false,
+ workspace::OpenOptions {
+ open_in_dev_container: true,
+ ..Default::default()
+ },
+ &response_tx,
+ &app_state,
+ &mut cx,
+ )
+ .await
+ }
+ })
+ .await;
+
+ assert!(!errored);
+
+ // Let any pending worktree scan events and updates settle.
+ cx.run_until_parked();
+
+ // With no .devcontainer config, the flag should be cleared once the
+ // worktree scan completes, rather than persisting on the workspace.
+ let multi_workspace = cx.update(|cx| cx.windows()[0].downcast::<MultiWorkspace>().unwrap());
+ multi_workspace
+ .update(cx, |multi_workspace, _, cx| {
+ let flag = multi_workspace
+ .workspace()
+ .read(cx)
+ .open_in_dev_container();
+ assert!(
+ !flag,
+ "open_in_dev_container flag should be cleared when no devcontainer config exists"
+ );
+ })
+ .unwrap();
+ }
}
@@ -162,6 +162,7 @@ fn send_args_to_instance(args: &Args) -> anyhow::Result<()> {
reuse: false,
env: None,
user_data_dir: args.user_data_dir.clone(),
+ dev_container: args.dev_container,
}
};
@@ -0,0 +1,22 @@
+[package]
+name = "zed_credentials_provider"
+version = "0.1.0"
+edition.workspace = true
+publish.workspace = true
+license = "GPL-3.0-or-later"
+
+[lints]
+workspace = true
+
+[lib]
+path = "src/zed_credentials_provider.rs"
+
+[dependencies]
+anyhow.workspace = true
+credentials_provider.workspace = true
+futures.workspace = true
+gpui.workspace = true
+paths.workspace = true
+release_channel.workspace = true
+serde.workspace = true
+serde_json.workspace = true
@@ -0,0 +1 @@
+../../LICENSE-GPL
@@ -0,0 +1,181 @@
+use std::collections::HashMap;
+use std::future::Future;
+use std::path::PathBuf;
+use std::pin::Pin;
+use std::sync::{Arc, LazyLock};
+
+use anyhow::Result;
+use credentials_provider::CredentialsProvider;
+use futures::FutureExt as _;
+use gpui::{App, AsyncApp, Global};
+use release_channel::ReleaseChannel;
+
+/// An environment variable whose presence indicates that the system keychain
+/// should be used in development.
+///
+/// By default, running Zed in development uses the development credentials
+/// provider. Setting this environment variable allows you to interact with the
+/// system keychain (for instance, if you need to test something).
+///
+/// Only works in development. Setting this environment variable in other
+/// release channels is a no-op.
+static ZED_DEVELOPMENT_USE_KEYCHAIN: LazyLock<bool> = LazyLock::new(|| {
+ std::env::var("ZED_DEVELOPMENT_USE_KEYCHAIN").is_ok_and(|value| !value.is_empty())
+});
+
+pub struct ZedCredentialsProvider(pub Arc<dyn CredentialsProvider>);
+
+impl Global for ZedCredentialsProvider {}
+
+/// Returns the global [`CredentialsProvider`].
+pub fn init_global(cx: &mut App) {
+ // The `CredentialsProvider` trait has `Send + Sync` bounds on it, so it
+ // seems like this is a false positive from Clippy.
+ #[allow(clippy::arc_with_non_send_sync)]
+ let provider = new(cx);
+ cx.set_global(ZedCredentialsProvider(provider));
+}
+
+pub fn global(cx: &App) -> Arc<dyn CredentialsProvider> {
+ cx.try_global::<ZedCredentialsProvider>()
+ .map(|provider| provider.0.clone())
+ .unwrap_or_else(|| new(cx))
+}
+
+fn new(cx: &App) -> Arc<dyn CredentialsProvider> {
+ let use_development_provider = match ReleaseChannel::try_global(cx) {
+ Some(ReleaseChannel::Dev) => {
+ // In development we default to using the development
+ // credentials provider to avoid getting spammed by relentless
+ // keychain access prompts.
+ //
+ // However, if the `ZED_DEVELOPMENT_USE_KEYCHAIN` environment
+ // variable is set, we will use the actual keychain.
+ !*ZED_DEVELOPMENT_USE_KEYCHAIN
+ }
+ Some(ReleaseChannel::Nightly | ReleaseChannel::Preview | ReleaseChannel::Stable) | None => {
+ false
+ }
+ };
+
+ if use_development_provider {
+ Arc::new(DevelopmentCredentialsProvider::new())
+ } else {
+ Arc::new(KeychainCredentialsProvider)
+ }
+}
+
+/// A credentials provider that stores credentials in the system keychain.
+struct KeychainCredentialsProvider;
+
+impl CredentialsProvider for KeychainCredentialsProvider {
+ fn read_credentials<'a>(
+ &'a self,
+ url: &'a str,
+ cx: &'a AsyncApp,
+ ) -> Pin<Box<dyn Future<Output = Result<Option<(String, Vec<u8>)>>> + 'a>> {
+ async move { cx.update(|cx| cx.read_credentials(url)).await }.boxed_local()
+ }
+
+ fn write_credentials<'a>(
+ &'a self,
+ url: &'a str,
+ username: &'a str,
+ password: &'a [u8],
+ cx: &'a AsyncApp,
+ ) -> Pin<Box<dyn Future<Output = Result<()>> + 'a>> {
+ async move {
+ cx.update(move |cx| cx.write_credentials(url, username, password))
+ .await
+ }
+ .boxed_local()
+ }
+
+ fn delete_credentials<'a>(
+ &'a self,
+ url: &'a str,
+ cx: &'a AsyncApp,
+ ) -> Pin<Box<dyn Future<Output = Result<()>> + 'a>> {
+ async move { cx.update(move |cx| cx.delete_credentials(url)).await }.boxed_local()
+ }
+}
+
+/// A credentials provider that stores credentials in a local file.
+///
+/// This MUST only be used in development, as this is not a secure way of storing
+/// credentials on user machines.
+///
+/// Its existence is purely to work around the annoyance of having to constantly
+/// re-allow access to the system keychain when developing Zed.
+struct DevelopmentCredentialsProvider {
+ path: PathBuf,
+}
+
+impl DevelopmentCredentialsProvider {
+ fn new() -> Self {
+ let path = paths::config_dir().join("development_credentials");
+
+ Self { path }
+ }
+
+ fn load_credentials(&self) -> Result<HashMap<String, (String, Vec<u8>)>> {
+ let json = std::fs::read(&self.path)?;
+ let credentials: HashMap<String, (String, Vec<u8>)> = serde_json::from_slice(&json)?;
+
+ Ok(credentials)
+ }
+
+ fn save_credentials(&self, credentials: &HashMap<String, (String, Vec<u8>)>) -> Result<()> {
+ let json = serde_json::to_string(credentials)?;
+ std::fs::write(&self.path, json)?;
+
+ Ok(())
+ }
+}
+
+impl CredentialsProvider for DevelopmentCredentialsProvider {
+ fn read_credentials<'a>(
+ &'a self,
+ url: &'a str,
+ _cx: &'a AsyncApp,
+ ) -> Pin<Box<dyn Future<Output = Result<Option<(String, Vec<u8>)>>> + 'a>> {
+ async move {
+ Ok(self
+ .load_credentials()
+ .unwrap_or_default()
+ .get(url)
+ .cloned())
+ }
+ .boxed_local()
+ }
+
+ fn write_credentials<'a>(
+ &'a self,
+ url: &'a str,
+ username: &'a str,
+ password: &'a [u8],
+ _cx: &'a AsyncApp,
+ ) -> Pin<Box<dyn Future<Output = Result<()>> + 'a>> {
+ async move {
+ let mut credentials = self.load_credentials().unwrap_or_default();
+ credentials.insert(url.to_string(), (username.to_string(), password.to_vec()));
+
+ self.save_credentials(&credentials)
+ }
+ .boxed_local()
+ }
+
+ fn delete_credentials<'a>(
+ &'a self,
+ url: &'a str,
+ _cx: &'a AsyncApp,
+ ) -> Pin<Box<dyn Future<Output = Result<()>> + 'a>> {
+ async move {
+ let mut credentials = self.load_credentials()?;
+ credentials.remove(url);
+
+ self.save_credentials(&credentials)
+ }
+ .boxed_local()
+ }
+}
@@ -15,4 +15,4 @@ path = "src/zed_env_vars.rs"
default = []
[dependencies]
-gpui.workspace = true
+env_var.workspace = true
@@ -1,45 +1,6 @@
-use gpui::SharedString;
+pub use env_var::{EnvVar, bool_env_var, env_var};
use std::sync::LazyLock;
/// Whether Zed is running in stateless mode.
/// When true, Zed will use in-memory databases instead of persistent storage.
pub static ZED_STATELESS: LazyLock<bool> = bool_env_var!("ZED_STATELESS");
-
-#[derive(Clone)]
-pub struct EnvVar {
- pub name: SharedString,
- /// Value of the environment variable. Also `None` when set to an empty string.
- pub value: Option<String>,
-}
-
-impl EnvVar {
- pub fn new(name: SharedString) -> Self {
- let value = std::env::var(name.as_str()).ok();
- if value.as_ref().is_some_and(|v| v.is_empty()) {
- Self { name, value: None }
- } else {
- Self { name, value }
- }
- }
-
- pub fn or(self, other: EnvVar) -> EnvVar {
- if self.value.is_some() { self } else { other }
- }
-}
-
-/// Creates a `LazyLock<EnvVar>` expression for use in a `static` declaration.
-#[macro_export]
-macro_rules! env_var {
- ($name:expr) => {
- ::std::sync::LazyLock::new(|| $crate::EnvVar::new(($name).into()))
- };
-}
-
-/// Generates a `LazyLock<bool>` expression for use in a `static` declaration. Checks if the
-/// environment variable exists and is non-empty.
-#[macro_export]
-macro_rules! bool_env_var {
- ($name:expr) => {
- ::std::sync::LazyLock::new(|| $crate::EnvVar::new(($name).into()).value.is_some())
- };
-}
@@ -13,6 +13,7 @@ path = "src/zeta_prompt.rs"
[dependencies]
anyhow.workspace = true
+imara-diff.workspace = true
serde.workspace = true
strum.workspace = true
@@ -0,0 +1,1406 @@
+use std::{
+ borrow::Cow,
+ fmt::{Display, Write},
+ mem,
+ ops::Range,
+};
+
+use anyhow::{Context as _, Result, anyhow};
+use imara_diff::{
+ Algorithm, Sink, diff,
+ intern::{InternedInput, Interner, Token},
+};
+
+pub fn strip_diff_path_prefix<'a>(diff: &'a str, prefix: &str) -> Cow<'a, str> {
+ if prefix.is_empty() {
+ return Cow::Borrowed(diff);
+ }
+
+ let prefix_with_slash = format!("{}/", prefix);
+ let mut needs_rewrite = false;
+
+ for line in diff.lines() {
+ match DiffLine::parse(line) {
+ DiffLine::OldPath { path } | DiffLine::NewPath { path } => {
+ if path.starts_with(&prefix_with_slash) {
+ needs_rewrite = true;
+ break;
+ }
+ }
+ _ => {}
+ }
+ }
+
+ if !needs_rewrite {
+ return Cow::Borrowed(diff);
+ }
+
+ let mut result = String::with_capacity(diff.len());
+ for line in diff.lines() {
+ match DiffLine::parse(line) {
+ DiffLine::OldPath { path } => {
+ let stripped = path
+ .strip_prefix(&prefix_with_slash)
+ .unwrap_or(path.as_ref());
+ result.push_str(&format!("--- a/{}\n", stripped));
+ }
+ DiffLine::NewPath { path } => {
+ let stripped = path
+ .strip_prefix(&prefix_with_slash)
+ .unwrap_or(path.as_ref());
+ result.push_str(&format!("+++ b/{}\n", stripped));
+ }
+ _ => {
+ result.push_str(line);
+ result.push('\n');
+ }
+ }
+ }
+
+ Cow::Owned(result)
+}
+
+/// Strip unnecessary git metadata lines from a diff, keeping only the lines
+/// needed for patch application: path headers (--- and +++), hunk headers (@@),
+/// and content lines (+, -, space).
+pub fn strip_diff_metadata(diff: &str) -> String {
+ let mut result = String::new();
+
+ for line in diff.lines() {
+ let dominated = DiffLine::parse(line);
+ match dominated {
+ // Keep path headers, hunk headers, and content lines
+ DiffLine::OldPath { .. }
+ | DiffLine::NewPath { .. }
+ | DiffLine::HunkHeader(_)
+ | DiffLine::Context(_)
+ | DiffLine::Deletion(_)
+ | DiffLine::Addition(_)
+ | DiffLine::NoNewlineAtEOF => {
+ result.push_str(line);
+ result.push('\n');
+ }
+ // Skip garbage lines (diff --git, index, etc.)
+ DiffLine::Garbage(_) => {}
+ }
+ }
+
+ result
+}
+
+/// Marker used to encode cursor position in patch comment lines.
+pub const CURSOR_POSITION_MARKER: &str = "[CURSOR_POSITION]";
+
+/// Extract cursor offset from a patch and return `(clean_patch, cursor_offset)`.
+///
+/// Cursor position is encoded as a comment line (starting with `#`) containing
+/// `[CURSOR_POSITION]`. A `^` in the line indicates the cursor column; a `<`
+/// indicates column 0. The offset is computed relative to addition (`+`) and
+/// context (` `) lines accumulated so far in the hunk, which represent the
+/// cursor position within the new text contributed by the hunk.
+pub fn extract_cursor_from_patch(patch: &str) -> (String, Option<usize>) {
+ let mut clean_patch = String::new();
+ let mut cursor_offset: Option<usize> = None;
+ let mut line_start_offset = 0usize;
+ let mut prev_line_start_offset = 0usize;
+
+ for line in patch.lines() {
+ let diff_line = DiffLine::parse(line);
+
+ match &diff_line {
+ DiffLine::Garbage(content)
+ if content.starts_with('#') && content.contains(CURSOR_POSITION_MARKER) =>
+ {
+ let caret_column = if let Some(caret_pos) = content.find('^') {
+ caret_pos
+ } else if content.find('<').is_some() {
+ 0
+ } else {
+ continue;
+ };
+ let cursor_column = caret_column.saturating_sub('#'.len_utf8());
+ cursor_offset = Some(prev_line_start_offset + cursor_column);
+ }
+ _ => {
+ if !clean_patch.is_empty() {
+ clean_patch.push('\n');
+ }
+ clean_patch.push_str(line);
+
+ match diff_line {
+ DiffLine::Addition(content) | DiffLine::Context(content) => {
+ prev_line_start_offset = line_start_offset;
+ line_start_offset += content.len() + 1;
+ }
+ _ => {}
+ }
+ }
+ }
+ }
+
+ if patch.ends_with('\n') && !clean_patch.is_empty() {
+ clean_patch.push('\n');
+ }
+
+ (clean_patch, cursor_offset)
+}
+
+/// Find all byte offsets where `hunk.context` occurs as a substring of `text`.
+///
+/// If no exact matches are found and the context ends with `'\n'` but `text`
+/// does not, retries without the trailing newline, accepting only a match at
+/// the very end of `text`. When this fallback fires, the hunk's context is
+/// trimmed and its edit ranges are clamped so that downstream code doesn't
+/// index past the end of the matched region. This handles diffs that are
+/// missing a `\ No newline at end of file` marker: the parser always appends
+/// `'\n'` via `writeln!`, so the context can have a trailing newline that
+/// doesn't exist in the source text.
+pub fn find_context_candidates(text: &str, hunk: &mut Hunk) -> Vec<usize> {
+ let candidates: Vec<usize> = text
+ .match_indices(&hunk.context)
+ .map(|(offset, _)| offset)
+ .collect();
+
+ if !candidates.is_empty() {
+ return candidates;
+ }
+
+ if hunk.context.ends_with('\n') && !hunk.context.is_empty() {
+ let old_len = hunk.context.len();
+ hunk.context.pop();
+ let new_len = hunk.context.len();
+
+ if !hunk.context.is_empty() {
+ let candidates: Vec<usize> = text
+ .match_indices(&hunk.context)
+ .filter(|(offset, _)| offset + new_len == text.len())
+ .map(|(offset, _)| offset)
+ .collect();
+
+ if !candidates.is_empty() {
+ for edit in &mut hunk.edits {
+ let touched_phantom = edit.range.end > new_len;
+ edit.range.start = edit.range.start.min(new_len);
+ edit.range.end = edit.range.end.min(new_len);
+ if touched_phantom {
+ // The replacement text was also written with a
+ // trailing '\n' that corresponds to the phantom
+ // newline we just removed from the context.
+ if edit.text.ends_with('\n') {
+ edit.text.pop();
+ }
+ }
+ }
+ return candidates;
+ }
+
+ // Restore if fallback didn't help either.
+ hunk.context.push('\n');
+ debug_assert_eq!(hunk.context.len(), old_len);
+ } else {
+ hunk.context.push('\n');
+ }
+ }
+
+ Vec::new()
+}
+
+/// Given multiple candidate offsets where context matches, use line numbers to disambiguate.
+/// Returns the offset that matches the expected line, or None if no match or no line number available.
+pub fn disambiguate_by_line_number(
+ candidates: &[usize],
+ expected_line: Option<u32>,
+ offset_to_line: &dyn Fn(usize) -> u32,
+) -> Option<usize> {
+ match candidates.len() {
+ 0 => None,
+ 1 => Some(candidates[0]),
+ _ => {
+ let expected = expected_line?;
+ candidates
+ .iter()
+ .copied()
+ .find(|&offset| offset_to_line(offset) == expected)
+ }
+ }
+}
+
+pub fn unified_diff_with_context(
+ old_text: &str,
+ new_text: &str,
+ old_start_line: u32,
+ new_start_line: u32,
+ context_lines: u32,
+) -> String {
+ let input = InternedInput::new(old_text, new_text);
+ diff(
+ Algorithm::Histogram,
+ &input,
+ OffsetUnifiedDiffBuilder::new(&input, old_start_line, new_start_line, context_lines),
+ )
+}
+
+struct OffsetUnifiedDiffBuilder<'a> {
+ before: &'a [Token],
+ after: &'a [Token],
+ interner: &'a Interner<&'a str>,
+ pos: u32,
+ before_hunk_start: u32,
+ after_hunk_start: u32,
+ before_hunk_len: u32,
+ after_hunk_len: u32,
+ old_line_offset: u32,
+ new_line_offset: u32,
+ context_lines: u32,
+ buffer: String,
+ dst: String,
+}
+
+impl<'a> OffsetUnifiedDiffBuilder<'a> {
+ fn new(
+ input: &'a InternedInput<&'a str>,
+ old_line_offset: u32,
+ new_line_offset: u32,
+ context_lines: u32,
+ ) -> Self {
+ Self {
+ before_hunk_start: 0,
+ after_hunk_start: 0,
+ before_hunk_len: 0,
+ after_hunk_len: 0,
+ old_line_offset,
+ new_line_offset,
+ context_lines,
+ buffer: String::with_capacity(8),
+ dst: String::new(),
+ interner: &input.interner,
+ before: &input.before,
+ after: &input.after,
+ pos: 0,
+ }
+ }
+
+ fn print_tokens(&mut self, tokens: &[Token], prefix: char) {
+ for &token in tokens {
+ writeln!(&mut self.buffer, "{prefix}{}", self.interner[token]).unwrap();
+ }
+ }
+
+ fn flush(&mut self) {
+ if self.before_hunk_len == 0 && self.after_hunk_len == 0 {
+ return;
+ }
+
+ let end = (self.pos + self.context_lines).min(self.before.len() as u32);
+ self.update_pos(end, end);
+
+ writeln!(
+ &mut self.dst,
+ "@@ -{},{} +{},{} @@",
+ self.before_hunk_start + 1 + self.old_line_offset,
+ self.before_hunk_len,
+ self.after_hunk_start + 1 + self.new_line_offset,
+ self.after_hunk_len,
+ )
+ .unwrap();
+ write!(&mut self.dst, "{}", &self.buffer).unwrap();
+ self.buffer.clear();
+ self.before_hunk_len = 0;
+ self.after_hunk_len = 0;
+ }
+
+ fn update_pos(&mut self, print_to: u32, move_to: u32) {
+ self.print_tokens(&self.before[self.pos as usize..print_to as usize], ' ');
+ let len = print_to - self.pos;
+ self.before_hunk_len += len;
+ self.after_hunk_len += len;
+ self.pos = move_to;
+ }
+}
+
+impl Sink for OffsetUnifiedDiffBuilder<'_> {
+ type Out = String;
+
+ fn process_change(&mut self, before: Range<u32>, after: Range<u32>) {
+ if before.start - self.pos > self.context_lines * 2 {
+ self.flush();
+ }
+ if self.before_hunk_len == 0 && self.after_hunk_len == 0 {
+ self.pos = before.start.saturating_sub(self.context_lines);
+ self.before_hunk_start = self.pos;
+ self.after_hunk_start = after.start.saturating_sub(self.context_lines);
+ }
+
+ self.update_pos(before.start, before.end);
+ self.before_hunk_len += before.end - before.start;
+ self.after_hunk_len += after.end - after.start;
+ self.print_tokens(
+ &self.before[before.start as usize..before.end as usize],
+ '-',
+ );
+ self.print_tokens(&self.after[after.start as usize..after.end as usize], '+');
+ }
+
+ fn finish(mut self) -> Self::Out {
+ self.flush();
+ self.dst
+ }
+}
+
+pub fn encode_cursor_in_patch(patch: &str, cursor_offset: Option<usize>) -> String {
+ let Some(cursor_offset) = cursor_offset else {
+ return patch.to_string();
+ };
+
+ let mut result = String::new();
+ let mut line_start_offset = 0usize;
+
+ for line in patch.lines() {
+ if matches!(
+ DiffLine::parse(line),
+ DiffLine::Garbage(content)
+ if content.starts_with('#') && content.contains(CURSOR_POSITION_MARKER)
+ ) {
+ continue;
+ }
+
+ if !result.is_empty() {
+ result.push('\n');
+ }
+ result.push_str(line);
+
+ match DiffLine::parse(line) {
+ DiffLine::Addition(content) => {
+ let line_end_offset = line_start_offset + content.len();
+
+ if cursor_offset >= line_start_offset && cursor_offset <= line_end_offset {
+ let cursor_column = cursor_offset - line_start_offset;
+
+ result.push('\n');
+ result.push('#');
+ for _ in 0..cursor_column {
+ result.push(' ');
+ }
+ write!(result, "^{}", CURSOR_POSITION_MARKER).unwrap();
+ }
+
+ line_start_offset = line_end_offset + 1;
+ }
+ DiffLine::Context(content) => {
+ line_start_offset += content.len() + 1;
+ }
+ _ => {}
+ }
+ }
+
+ if patch.ends_with('\n') {
+ result.push('\n');
+ }
+
+ result
+}
+
+pub fn apply_diff_to_string(diff_str: &str, text: &str) -> Result<String> {
+ apply_diff_to_string_with_hunk_offset(diff_str, text).map(|(text, _)| text)
+}
+
+/// Applies a diff to a string and returns the result along with the offset where
+/// the first hunk's context matched in the original text. This offset can be used
+/// to adjust cursor positions that are relative to the hunk's content.
+pub fn apply_diff_to_string_with_hunk_offset(
+ diff_str: &str,
+ text: &str,
+) -> Result<(String, Option<usize>)> {
+ let mut diff = DiffParser::new(diff_str);
+
+ let mut text = text.to_string();
+ let mut first_hunk_offset = None;
+
+ while let Some(event) = diff.next().context("Failed to parse diff")? {
+ match event {
+ DiffEvent::Hunk {
+ mut hunk,
+ path: _,
+ status: _,
+ } => {
+ let candidates = find_context_candidates(&text, &mut hunk);
+
+ let hunk_offset =
+ disambiguate_by_line_number(&candidates, hunk.start_line, &|offset| {
+ text[..offset].matches('\n').count() as u32
+ })
+ .ok_or_else(|| anyhow!("couldn't resolve hunk"))?;
+
+ if first_hunk_offset.is_none() {
+ first_hunk_offset = Some(hunk_offset);
+ }
+
+ for edit in hunk.edits.iter().rev() {
+ let range = (hunk_offset + edit.range.start)..(hunk_offset + edit.range.end);
+ text.replace_range(range, &edit.text);
+ }
+ }
+ DiffEvent::FileEnd { .. } => {}
+ }
+ }
+
+ Ok((text, first_hunk_offset))
+}
+
+struct PatchFile<'a> {
+ old_path: Cow<'a, str>,
+ new_path: Cow<'a, str>,
+}
+
+pub struct DiffParser<'a> {
+ current_file: Option<PatchFile<'a>>,
+ current_line: Option<(&'a str, DiffLine<'a>)>,
+ hunk: Hunk,
+ diff: std::str::Lines<'a>,
+ pending_start_line: Option<u32>,
+ processed_no_newline: bool,
+ last_diff_op: LastDiffOp,
+}
+
+#[derive(Clone, Copy, Default)]
+enum LastDiffOp {
+ #[default]
+ None,
+ Context,
+ Deletion,
+ Addition,
+}
+
+#[derive(Debug, PartialEq)]
+pub enum DiffEvent<'a> {
+ Hunk {
+ path: Cow<'a, str>,
+ hunk: Hunk,
+ status: FileStatus,
+ },
+ FileEnd {
+ renamed_to: Option<Cow<'a, str>>,
+ },
+}
+
+#[derive(Debug, Clone, Copy, PartialEq)]
+pub enum FileStatus {
+ Created,
+ Modified,
+ Deleted,
+}
+
+#[derive(Debug, Default, PartialEq)]
+pub struct Hunk {
+ pub context: String,
+ pub edits: Vec<Edit>,
+ pub start_line: Option<u32>,
+}
+
+impl Hunk {
+ pub fn is_empty(&self) -> bool {
+ self.context.is_empty() && self.edits.is_empty()
+ }
+}
+
+#[derive(Debug, PartialEq)]
+pub struct Edit {
+ pub range: Range<usize>,
+ pub text: String,
+}
+
+impl<'a> DiffParser<'a> {
+ pub fn new(diff: &'a str) -> Self {
+ let mut diff = diff.lines();
+ let current_line = diff.next().map(|line| (line, DiffLine::parse(line)));
+ DiffParser {
+ current_file: None,
+ hunk: Hunk::default(),
+ current_line,
+ diff,
+ pending_start_line: None,
+ processed_no_newline: false,
+ last_diff_op: LastDiffOp::None,
+ }
+ }
+
+ pub fn next(&mut self) -> Result<Option<DiffEvent<'a>>> {
+ loop {
+ let (hunk_done, file_done) = match self.current_line.as_ref().map(|e| &e.1) {
+ Some(DiffLine::OldPath { .. }) | Some(DiffLine::Garbage(_)) | None => (true, true),
+ Some(DiffLine::HunkHeader(_)) => (true, false),
+ _ => (false, false),
+ };
+
+ if hunk_done {
+ if let Some(file) = &self.current_file
+ && !self.hunk.is_empty()
+ {
+ let status = if file.old_path == "/dev/null" {
+ FileStatus::Created
+ } else if file.new_path == "/dev/null" {
+ FileStatus::Deleted
+ } else {
+ FileStatus::Modified
+ };
+ let path = if status == FileStatus::Created {
+ file.new_path.clone()
+ } else {
+ file.old_path.clone()
+ };
+ let mut hunk = mem::take(&mut self.hunk);
+ hunk.start_line = self.pending_start_line.take();
+ self.processed_no_newline = false;
+ self.last_diff_op = LastDiffOp::None;
+ return Ok(Some(DiffEvent::Hunk { path, hunk, status }));
+ }
+ }
+
+ if file_done {
+ if let Some(PatchFile { old_path, new_path }) = self.current_file.take() {
+ return Ok(Some(DiffEvent::FileEnd {
+ renamed_to: if old_path != new_path && old_path != "/dev/null" {
+ Some(new_path)
+ } else {
+ None
+ },
+ }));
+ }
+ }
+
+ let Some((line, parsed_line)) = self.current_line.take() else {
+ break;
+ };
+
+ (|| {
+ match parsed_line {
+ DiffLine::OldPath { path } => {
+ self.current_file = Some(PatchFile {
+ old_path: path,
+ new_path: "".into(),
+ });
+ }
+ DiffLine::NewPath { path } => {
+ if let Some(current_file) = &mut self.current_file {
+ current_file.new_path = path
+ }
+ }
+ DiffLine::HunkHeader(location) => {
+ if let Some(loc) = location {
+ self.pending_start_line = Some(loc.start_line_old);
+ }
+ }
+ DiffLine::Context(ctx) => {
+ if self.current_file.is_some() {
+ writeln!(&mut self.hunk.context, "{ctx}")?;
+ self.last_diff_op = LastDiffOp::Context;
+ }
+ }
+ DiffLine::Deletion(del) => {
+ if self.current_file.is_some() {
+ let range = self.hunk.context.len()
+ ..self.hunk.context.len() + del.len() + '\n'.len_utf8();
+ if let Some(last_edit) = self.hunk.edits.last_mut()
+ && last_edit.range.end == range.start
+ {
+ last_edit.range.end = range.end;
+ } else {
+ self.hunk.edits.push(Edit {
+ range,
+ text: String::new(),
+ });
+ }
+ writeln!(&mut self.hunk.context, "{del}")?;
+ self.last_diff_op = LastDiffOp::Deletion;
+ }
+ }
+ DiffLine::Addition(add) => {
+ if self.current_file.is_some() {
+ let range = self.hunk.context.len()..self.hunk.context.len();
+ if let Some(last_edit) = self.hunk.edits.last_mut()
+ && last_edit.range.end == range.start
+ {
+ writeln!(&mut last_edit.text, "{add}").unwrap();
+ } else {
+ self.hunk.edits.push(Edit {
+ range,
+ text: format!("{add}\n"),
+ });
+ }
+ self.last_diff_op = LastDiffOp::Addition;
+ }
+ }
+ DiffLine::NoNewlineAtEOF => {
+ if !self.processed_no_newline {
+ self.processed_no_newline = true;
+ match self.last_diff_op {
+ LastDiffOp::Addition => {
+ // Remove trailing newline from the last addition
+ if let Some(last_edit) = self.hunk.edits.last_mut() {
+ last_edit.text.pop();
+ }
+ }
+ LastDiffOp::Deletion => {
+ // Remove trailing newline from context (which includes the deletion)
+ self.hunk.context.pop();
+ if let Some(last_edit) = self.hunk.edits.last_mut() {
+ last_edit.range.end -= 1;
+ }
+ }
+ LastDiffOp::Context | LastDiffOp::None => {
+ // Remove trailing newline from context
+ self.hunk.context.pop();
+ }
+ }
+ }
+ }
+ DiffLine::Garbage(_) => {}
+ }
+
+ anyhow::Ok(())
+ })()
+ .with_context(|| format!("on line:\n\n```\n{}```", line))?;
+
+ self.current_line = self.diff.next().map(|line| (line, DiffLine::parse(line)));
+ }
+
+ anyhow::Ok(None)
+ }
+}
+
+#[derive(Debug, PartialEq)]
+pub enum DiffLine<'a> {
+ OldPath { path: Cow<'a, str> },
+ NewPath { path: Cow<'a, str> },
+ HunkHeader(Option<HunkLocation>),
+ Context(&'a str),
+ Deletion(&'a str),
+ Addition(&'a str),
+ NoNewlineAtEOF,
+ Garbage(&'a str),
+}
+
+#[derive(Debug, PartialEq)]
+pub struct HunkLocation {
+ pub start_line_old: u32,
+ pub count_old: u32,
+ pub start_line_new: u32,
+ pub count_new: u32,
+}
+
+impl<'a> DiffLine<'a> {
+ pub fn parse(line: &'a str) -> Self {
+ Self::try_parse(line).unwrap_or(Self::Garbage(line))
+ }
+
+ fn try_parse(line: &'a str) -> Option<Self> {
+ if line.starts_with("\\ No newline") {
+ return Some(Self::NoNewlineAtEOF);
+ }
+ if let Some(header) = line.strip_prefix("---").and_then(eat_required_whitespace) {
+ let path = parse_header_path("a/", header);
+ Some(Self::OldPath { path })
+ } else if let Some(header) = line.strip_prefix("+++").and_then(eat_required_whitespace) {
+ Some(Self::NewPath {
+ path: parse_header_path("b/", header),
+ })
+ } else if let Some(header) = line.strip_prefix("@@").and_then(eat_required_whitespace) {
+ if header.starts_with("...") {
+ return Some(Self::HunkHeader(None));
+ }
+
+ let mut tokens = header.split_whitespace();
+ let old_range = tokens.next()?.strip_prefix('-')?;
+ let new_range = tokens.next()?.strip_prefix('+')?;
+
+ let (start_line_old, count_old) = old_range.split_once(',').unwrap_or((old_range, "1"));
+ let (start_line_new, count_new) = new_range.split_once(',').unwrap_or((new_range, "1"));
+
+ Some(Self::HunkHeader(Some(HunkLocation {
+ start_line_old: start_line_old.parse::<u32>().ok()?.saturating_sub(1),
+ count_old: count_old.parse().ok()?,
+ start_line_new: start_line_new.parse::<u32>().ok()?.saturating_sub(1),
+ count_new: count_new.parse().ok()?,
+ })))
+ } else if let Some(deleted_header) = line.strip_prefix("-") {
+ Some(Self::Deletion(deleted_header))
+ } else if line.is_empty() {
+ Some(Self::Context(""))
+ } else if let Some(context) = line.strip_prefix(" ") {
+ Some(Self::Context(context))
+ } else {
+ Some(Self::Addition(line.strip_prefix("+")?))
+ }
+ }
+}
+
+impl<'a> Display for DiffLine<'a> {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ match self {
+ DiffLine::OldPath { path } => write!(f, "--- {path}"),
+ DiffLine::NewPath { path } => write!(f, "+++ {path}"),
+ DiffLine::HunkHeader(Some(hunk_location)) => {
+ write!(
+ f,
+ "@@ -{},{} +{},{} @@",
+ hunk_location.start_line_old + 1,
+ hunk_location.count_old,
+ hunk_location.start_line_new + 1,
+ hunk_location.count_new
+ )
+ }
+ DiffLine::HunkHeader(None) => write!(f, "@@ ... @@"),
+ DiffLine::Context(content) => write!(f, " {content}"),
+ DiffLine::Deletion(content) => write!(f, "-{content}"),
+ DiffLine::Addition(content) => write!(f, "+{content}"),
+ DiffLine::NoNewlineAtEOF => write!(f, "\\ No newline at end of file"),
+ DiffLine::Garbage(line) => write!(f, "{line}"),
+ }
+ }
+}
+
+fn parse_header_path<'a>(strip_prefix: &'static str, header: &'a str) -> Cow<'a, str> {
+ if !header.contains(['"', '\\']) {
+ let path = header.split_ascii_whitespace().next().unwrap_or(header);
+ return Cow::Borrowed(path.strip_prefix(strip_prefix).unwrap_or(path));
+ }
+
+ let mut path = String::with_capacity(header.len());
+ let mut in_quote = false;
+ let mut chars = header.chars().peekable();
+ let mut strip_prefix = Some(strip_prefix);
+
+ while let Some(char) = chars.next() {
+ if char == '"' {
+ in_quote = !in_quote;
+ } else if char == '\\' {
+ let Some(&next_char) = chars.peek() else {
+ break;
+ };
+ chars.next();
+ path.push(next_char);
+ } else if char.is_ascii_whitespace() && !in_quote {
+ break;
+ } else {
+ path.push(char);
+ }
+
+ if let Some(prefix) = strip_prefix
+ && path == prefix
+ {
+ strip_prefix.take();
+ path.clear();
+ }
+ }
+
+ Cow::Owned(path)
+}
+
+fn eat_required_whitespace(header: &str) -> Option<&str> {
+ let trimmed = header.trim_ascii_start();
+
+ if trimmed.len() == header.len() {
+ None
+ } else {
+ Some(trimmed)
+ }
+}
+
+#[cfg(test)]
+mod tests {
+ use super::*;
+ use indoc::indoc;
+
+ #[test]
+ fn parse_lines_simple() {
+ let input = indoc! {"
+ diff --git a/text.txt b/text.txt
+ index 86c770d..a1fd855 100644
+ --- a/file.txt
+ +++ b/file.txt
+ @@ -1,2 +1,3 @@
+ context
+ -deleted
+ +inserted
+ garbage
+
+ --- b/file.txt
+ +++ a/file.txt
+ "};
+
+ let lines = input.lines().map(DiffLine::parse).collect::<Vec<_>>();
+
+ assert_eq!(
+ lines,
+ &[
+ DiffLine::Garbage("diff --git a/text.txt b/text.txt"),
+ DiffLine::Garbage("index 86c770d..a1fd855 100644"),
+ DiffLine::OldPath {
+ path: "file.txt".into()
+ },
+ DiffLine::NewPath {
+ path: "file.txt".into()
+ },
+ DiffLine::HunkHeader(Some(HunkLocation {
+ start_line_old: 0,
+ count_old: 2,
+ start_line_new: 0,
+ count_new: 3
+ })),
+ DiffLine::Context("context"),
+ DiffLine::Deletion("deleted"),
+ DiffLine::Addition("inserted"),
+ DiffLine::Garbage("garbage"),
+ DiffLine::Context(""),
+ DiffLine::OldPath {
+ path: "b/file.txt".into()
+ },
+ DiffLine::NewPath {
+ path: "a/file.txt".into()
+ },
+ ]
+ );
+ }
+
+ #[test]
+ fn file_header_extra_space() {
+ let options = ["--- file", "---  file", "---\tfile"];
+
+ for option in options {
+ assert_eq!(
+ DiffLine::parse(option),
+ DiffLine::OldPath {
+ path: "file".into()
+ },
+ "{option}",
+ );
+ }
+ }
+
+ #[test]
+ fn hunk_header_extra_space() {
+ let options = [
+ "@@ -1,2 +1,3 @@",
+ "@@  -1,2 +1,3 @@",
+ "@@\t-1,2\t+1,3\t@@",
+ "@@ -1,2  +1,3 @@",
+ "@@ -1,2 +1,3  @@",
+ "@@  -1,2  +1,3  @@",
+ "@@ -1,2 +1,3 @@ garbage",
+ ];
+
+ for option in options {
+ assert_eq!(
+ DiffLine::parse(option),
+ DiffLine::HunkHeader(Some(HunkLocation {
+ start_line_old: 0,
+ count_old: 2,
+ start_line_new: 0,
+ count_new: 3
+ })),
+ "{option}",
+ );
+ }
+ }
+
+ #[test]
+ fn hunk_header_without_location() {
+ assert_eq!(DiffLine::parse("@@ ... @@"), DiffLine::HunkHeader(None));
+ }
+
+ #[test]
+ fn test_parse_path() {
+ assert_eq!(parse_header_path("a/", "foo.txt"), "foo.txt");
+ assert_eq!(
+ parse_header_path("a/", "foo/bar/baz.txt"),
+ "foo/bar/baz.txt"
+ );
+ assert_eq!(parse_header_path("a/", "a/foo.txt"), "foo.txt");
+ assert_eq!(
+ parse_header_path("a/", "a/foo/bar/baz.txt"),
+ "foo/bar/baz.txt"
+ );
+
+ // Extra
+ assert_eq!(
+ parse_header_path("a/", "a/foo/bar/baz.txt 2025"),
+ "foo/bar/baz.txt"
+ );
+ assert_eq!(
+ parse_header_path("a/", "a/foo/bar/baz.txt\t2025"),
+ "foo/bar/baz.txt"
+ );
+ assert_eq!(
+ parse_header_path("a/", "a/foo/bar/baz.txt \""),
+ "foo/bar/baz.txt"
+ );
+
+ // Quoted
+ assert_eq!(
+ parse_header_path("a/", "a/foo/bar/\"baz quox.txt\""),
+ "foo/bar/baz quox.txt"
+ );
+ assert_eq!(
+ parse_header_path("a/", "\"a/foo/bar/baz quox.txt\""),
+ "foo/bar/baz quox.txt"
+ );
+ assert_eq!(
+ parse_header_path("a/", "\"foo/bar/baz quox.txt\""),
+ "foo/bar/baz quox.txt"
+ );
+ assert_eq!(parse_header_path("a/", "\"whatever 🤷\""), "whatever 🤷");
+ assert_eq!(
+ parse_header_path("a/", "\"foo/bar/baz quox.txt\" 2025"),
+ "foo/bar/baz quox.txt"
+ );
+ // unescaped quotes are dropped
+ assert_eq!(parse_header_path("a/", "foo/\"bar\""), "foo/bar");
+
+ // Escaped
+ assert_eq!(
+ parse_header_path("a/", "\"foo/\\\"bar\\\"/baz.txt\""),
+ "foo/\"bar\"/baz.txt"
+ );
+ assert_eq!(
+ parse_header_path("a/", "\"C:\\\\Projects\\\\My App\\\\old file.txt\""),
+ "C:\\Projects\\My App\\old file.txt"
+ );
+ }
+
+ #[test]
+ fn test_parse_diff_with_leading_and_trailing_garbage() {
+ let diff = indoc! {"
+ I need to make some changes.
+
+ I'll change the following things:
+ - one
+ - two
+ - three
+
+ ```
+ --- a/file.txt
+ +++ b/file.txt
+ one
+ +AND
+ two
+ ```
+
+ Summary of what I did:
+ - one
+ - two
+ - three
+
+ That's about it.
+ "};
+
+ let mut events = Vec::new();
+ let mut parser = DiffParser::new(diff);
+ while let Some(event) = parser.next().unwrap() {
+ events.push(event);
+ }
+
+ assert_eq!(
+ events,
+ &[
+ DiffEvent::Hunk {
+ path: "file.txt".into(),
+ hunk: Hunk {
+ context: "one\ntwo\n".into(),
+ edits: vec![Edit {
+ range: 4..4,
+ text: "AND\n".into()
+ }],
+ start_line: None,
+ },
+ status: FileStatus::Modified,
+ },
+ DiffEvent::FileEnd { renamed_to: None }
+ ],
+ )
+ }
+
+ #[test]
+ fn test_no_newline_at_eof() {
+ let diff = indoc! {"
+ --- a/file.py
+ +++ b/file.py
+ @@ -55,7 +55,3 @@ class CustomDataset(Dataset):
+ torch.set_rng_state(state)
+ mask = self.transform(mask)
+
+ - if self.mode == 'Training':
+ - return (img, mask, name)
+ - else:
+ - return (img, mask, name)
+ \\ No newline at end of file
+ "};
+
+ let mut events = Vec::new();
+ let mut parser = DiffParser::new(diff);
+ while let Some(event) = parser.next().unwrap() {
+ events.push(event);
+ }
+
+ assert_eq!(
+ events,
+ &[
+ DiffEvent::Hunk {
+ path: "file.py".into(),
+ hunk: Hunk {
+ context: concat!(
+ " torch.set_rng_state(state)\n",
+ " mask = self.transform(mask)\n",
+ "\n",
+ " if self.mode == 'Training':\n",
+ " return (img, mask, name)\n",
+ " else:\n",
+ " return (img, mask, name)",
+ )
+ .into(),
+ edits: vec![Edit {
+ range: 80..203,
+ text: "".into()
+ }],
+ start_line: Some(54), // @@ -55,7 -> line 54 (0-indexed)
+ },
+ status: FileStatus::Modified,
+ },
+ DiffEvent::FileEnd { renamed_to: None }
+ ],
+ );
+ }
+
+ #[test]
+ fn test_no_newline_at_eof_addition() {
+ let diff = indoc! {"
+ --- a/file.txt
+ +++ b/file.txt
+ @@ -1,2 +1,3 @@
+ context
+ -deleted
+ +added line
+ \\ No newline at end of file
+ "};
+
+ let mut events = Vec::new();
+ let mut parser = DiffParser::new(diff);
+ while let Some(event) = parser.next().unwrap() {
+ events.push(event);
+ }
+
+ assert_eq!(
+ events,
+ &[
+ DiffEvent::Hunk {
+ path: "file.txt".into(),
+ hunk: Hunk {
+ context: "context\ndeleted\n".into(),
+ edits: vec![Edit {
+ range: 8..16,
+ text: "added line".into()
+ }],
+ start_line: Some(0), // @@ -1,2 -> line 0 (0-indexed)
+ },
+ status: FileStatus::Modified,
+ },
+ DiffEvent::FileEnd { renamed_to: None }
+ ],
+ );
+ }
+
+ #[test]
+ fn test_double_no_newline_at_eof() {
+ // Two consecutive "no newline" markers - the second should be ignored
+ let diff = indoc! {"
+ --- a/file.txt
+ +++ b/file.txt
+ @@ -1,3 +1,3 @@
+ line1
+ -old
+ +new
+ line3
+ \\ No newline at end of file
+ \\ No newline at end of file
+ "};
+
+ let mut events = Vec::new();
+ let mut parser = DiffParser::new(diff);
+ while let Some(event) = parser.next().unwrap() {
+ events.push(event);
+ }
+
+ assert_eq!(
+ events,
+ &[
+ DiffEvent::Hunk {
+ path: "file.txt".into(),
+ hunk: Hunk {
+ context: "line1\nold\nline3".into(), // Only one newline removed
+ edits: vec![Edit {
+ range: 6..10, // "old\n" is 4 bytes
+ text: "new\n".into()
+ }],
+ start_line: Some(0),
+ },
+ status: FileStatus::Modified,
+ },
+ DiffEvent::FileEnd { renamed_to: None }
+ ],
+ );
+ }
+
+ #[test]
+ fn test_no_newline_after_context_not_addition() {
+ // "No newline" after context lines should remove newline from context,
+ // not from an earlier addition
+ let diff = indoc! {"
+ --- a/file.txt
+ +++ b/file.txt
+ @@ -1,4 +1,4 @@
+ line1
+ -old
+ +new
+ line3
+ line4
+ \\ No newline at end of file
+ "};
+
+ let mut events = Vec::new();
+ let mut parser = DiffParser::new(diff);
+ while let Some(event) = parser.next().unwrap() {
+ events.push(event);
+ }
+
+ assert_eq!(
+ events,
+ &[
+ DiffEvent::Hunk {
+ path: "file.txt".into(),
+ hunk: Hunk {
+ // newline removed from line4 (context), not from "new" (addition)
+ context: "line1\nold\nline3\nline4".into(),
+ edits: vec![Edit {
+ range: 6..10, // "old\n" is 4 bytes
+ text: "new\n".into() // Still has newline
+ }],
+ start_line: Some(0),
+ },
+ status: FileStatus::Modified,
+ },
+ DiffEvent::FileEnd { renamed_to: None }
+ ],
+ );
+ }
+
+ #[test]
+ fn test_strip_diff_metadata() {
+ let diff_with_metadata = indoc! {r#"
+ diff --git a/file.txt b/file.txt
+ index 1234567..abcdefg 100644
+ --- a/file.txt
+ +++ b/file.txt
+ @@ -1,3 +1,4 @@
+ context line
+ -removed line
+ +added line
+ more context
+ "#};
+
+ let stripped = strip_diff_metadata(diff_with_metadata);
+
+ assert_eq!(
+ stripped,
+ indoc! {r#"
+ --- a/file.txt
+ +++ b/file.txt
+ @@ -1,3 +1,4 @@
+ context line
+ -removed line
+ +added line
+ more context
+ "#}
+ );
+ }
+
+ #[test]
+ fn test_apply_diff_to_string_no_trailing_newline() {
+ // Text without trailing newline; diff generated without
+ // `\ No newline at end of file` marker.
+ let text = "line1\nline2\nline3";
+ let diff = indoc! {"
+ --- a/file.txt
+ +++ b/file.txt
+ @@ -1,3 +1,3 @@
+ line1
+ -line2
+ +replaced
+ line3
+ "};
+
+ let result = apply_diff_to_string(diff, text).unwrap();
+ assert_eq!(result, "line1\nreplaced\nline3");
+ }
+
+ #[test]
+ fn test_apply_diff_to_string_trailing_newline_present() {
+ // When text has a trailing newline, exact matching still works and
+ // the fallback is never needed.
+ let text = "line1\nline2\nline3\n";
+ let diff = indoc! {"
+ --- a/file.txt
+ +++ b/file.txt
+ @@ -1,3 +1,3 @@
+ line1
+ -line2
+ +replaced
+ line3
+ "};
+
+ let result = apply_diff_to_string(diff, text).unwrap();
+ assert_eq!(result, "line1\nreplaced\nline3\n");
+ }
+
+ #[test]
+ fn test_apply_diff_to_string_deletion_at_end_no_trailing_newline() {
+ // Deletion of the last line when text has no trailing newline.
+ // The edit range must be clamped so it doesn't index past the
+ // end of the text.
+ let text = "line1\nline2\nline3";
+ let diff = indoc! {"
+ --- a/file.txt
+ +++ b/file.txt
+ @@ -1,3 +1,2 @@
+ line1
+ line2
+ -line3
+ "};
+
+ let result = apply_diff_to_string(diff, text).unwrap();
+ assert_eq!(result, "line1\nline2\n");
+ }
+
+ #[test]
+ fn test_apply_diff_to_string_replace_last_line_no_trailing_newline() {
+ // Replace the last line when text has no trailing newline.
+ let text = "aaa\nbbb\nccc";
+ let diff = indoc! {"
+ --- a/file.txt
+ +++ b/file.txt
+ @@ -1,3 +1,3 @@
+ aaa
+ bbb
+ -ccc
+ +ddd
+ "};
+
+ let result = apply_diff_to_string(diff, text).unwrap();
+ assert_eq!(result, "aaa\nbbb\nddd");
+ }
+
+ #[test]
+ fn test_apply_diff_to_string_multibyte_no_trailing_newline() {
+ // Multi-byte UTF-8 characters near the end; ensures char boundary
+ // safety when the fallback clamps edit ranges.
+ let text = "hello\n세계";
+ let diff = indoc! {"
+ --- a/file.txt
+ +++ b/file.txt
+ @@ -1,2 +1,2 @@
+ hello
+ -세계
+ +world
+ "};
+
+ let result = apply_diff_to_string(diff, text).unwrap();
+ assert_eq!(result, "hello\nworld");
+ }
+
+ #[test]
+ fn test_find_context_candidates_no_false_positive_mid_text() {
+ // The stripped fallback must only match at the end of text, not in
+ // the middle where a real newline exists.
+ let text = "aaa\nbbb\nccc\n";
+ let mut hunk = Hunk {
+ context: "bbb\n".into(),
+ edits: vec![],
+ start_line: None,
+ };
+
+ let candidates = find_context_candidates(text, &mut hunk);
+ // Exact match at offset 4 — the fallback is not used.
+ assert_eq!(candidates, vec![4]);
+ }
+
+ #[test]
+ fn test_find_context_candidates_fallback_at_end() {
+ let text = "aaa\nbbb";
+ let mut hunk = Hunk {
+ context: "bbb\n".into(),
+ edits: vec![],
+ start_line: None,
+ };
+
+ let candidates = find_context_candidates(text, &mut hunk);
+ assert_eq!(candidates, vec![4]);
+ // Context should be stripped.
+ assert_eq!(hunk.context, "bbb");
+ }
+
+ #[test]
+ fn test_find_context_candidates_no_fallback_mid_text() {
+ // "bbb" appears mid-text followed by a newline, so the exact
+ // match succeeds. Verify the stripped fallback doesn't produce a
+ // second, spurious candidate.
+ let text = "aaa\nbbb\nccc";
+ let mut hunk = Hunk {
+ context: "bbb\nccc\n".into(),
+ edits: vec![],
+ start_line: None,
+ };
+
+ let candidates = find_context_candidates(text, &mut hunk);
+ // No exact match (text ends without newline after "ccc"), but the
+ // stripped context "bbb\nccc" matches at offset 4, which is the end.
+ assert_eq!(candidates, vec![4]);
+ assert_eq!(hunk.context, "bbb\nccc");
+ }
+
+ #[test]
+ fn test_find_context_candidates_clamps_edit_ranges() {
+ let text = "aaa\nbbb";
+ let mut hunk = Hunk {
+ context: "aaa\nbbb\n".into(),
+ edits: vec![Edit {
+ range: 4..8, // "bbb\n" — end points at the trailing \n
+ text: "ccc\n".into(),
+ }],
+ start_line: None,
+ };
+
+ let candidates = find_context_candidates(text, &mut hunk);
+ assert_eq!(candidates, vec![0]);
+ // Edit range end should be clamped to 7 (new context length).
+ assert_eq!(hunk.edits[0].range, 4..7);
+ }
+
+ #[test]
+ fn test_unified_diff_with_context_matches_expected_context_window() {
+ let old_text = "line1\nline2\nline3\nline4\nline5\nCHANGE_ME\nline7\nline8\n";
+ let new_text = "line1\nline2\nline3\nline4\nline5\nCHANGED\nline7\nline8\n";
+
+ let diff_default = unified_diff_with_context(old_text, new_text, 0, 0, 3);
+ assert_eq!(
+ diff_default,
+ "@@ -3,6 +3,6 @@\n line3\n line4\n line5\n-CHANGE_ME\n+CHANGED\n line7\n line8\n"
+ );
+
+ let diff_full_context = unified_diff_with_context(old_text, new_text, 0, 0, 8);
+ assert_eq!(
+ diff_full_context,
+ "@@ -1,8 +1,8 @@\n line1\n line2\n line3\n line4\n line5\n-CHANGE_ME\n+CHANGED\n line7\n line8\n"
+ );
+
+ let diff_no_context = unified_diff_with_context(old_text, new_text, 0, 0, 0);
+ assert_eq!(diff_no_context, "@@ -6,1 +6,1 @@\n-CHANGE_ME\n+CHANGED\n");
+ }
+}
@@ -1,5 +1,6 @@
pub mod excerpt_ranges;
pub mod multi_region;
+pub mod udiff;
use anyhow::{Result, anyhow};
use serde::{Deserialize, Serialize};
@@ -105,10 +106,19 @@ impl std::fmt::Display for ZetaFormat {
impl ZetaFormat {
pub fn parse(format_name: &str) -> Result<Self> {
+ let lower = format_name.to_lowercase();
+
+ // Exact case-insensitive match takes priority, bypassing ambiguity checks.
+ for variant in ZetaFormat::iter() {
+ if <&'static str>::from(&variant).to_lowercase() == lower {
+ return Ok(variant);
+ }
+ }
+
let mut results = ZetaFormat::iter().filter(|version| {
<&'static str>::from(version)
.to_lowercase()
- .contains(&format_name.to_lowercase())
+ .contains(&lower)
});
let Some(result) = results.next() else {
anyhow::bail!(
@@ -819,11 +829,146 @@ pub fn encode_patch_as_output_for_format(
}
}
+/// Given a `ZetaPromptInput`, a format, and a patch (with cursor already
+/// extracted), produce the expected model output string for training.
+pub fn format_expected_output(
+ input: &ZetaPromptInput,
+ format: ZetaFormat,
+ patch: &str,
+ cursor_offset: Option<usize>,
+) -> Result<String> {
+ let (context, editable_range, _, _) = resolve_cursor_region(input, format);
+ let mut old_editable = context[editable_range].to_string();
+ if !old_editable.is_empty() && !old_editable.ends_with('\n') {
+ old_editable.push('\n');
+ }
+
+ // Formats with their own output encoding (hashline, variable-edit,
+ // multi-region empty patches) are handled here.
+ if let Some(output) =
+ encode_patch_as_output_for_format(format, &old_editable, patch, cursor_offset)?
+ {
+ return Ok(output);
+ }
+
+ let empty_patch = patch.lines().count() <= 3;
+
+ match format {
+ // Multi-region formats: non-empty patches need diff application
+ // then marker-span encoding.
+ ZetaFormat::V0316SeedMultiRegions => {
+ let (new_editable, first_hunk_offset) =
+ udiff::apply_diff_to_string_with_hunk_offset(patch, &old_editable)?;
+ let cursor_in_new = cursor_in_new_text(cursor_offset, first_hunk_offset, &new_editable);
+ multi_region::encode_from_old_and_new_v0316(
+ &old_editable,
+ &new_editable,
+ cursor_in_new,
+ CURSOR_MARKER,
+ multi_region::V0316_END_MARKER,
+ )
+ }
+ ZetaFormat::V0318SeedMultiRegions => {
+ let (new_editable, first_hunk_offset) =
+ udiff::apply_diff_to_string_with_hunk_offset(patch, &old_editable)?;
+ let cursor_in_new = cursor_in_new_text(cursor_offset, first_hunk_offset, &new_editable);
+ multi_region::encode_from_old_and_new_v0318(
+ &old_editable,
+ &new_editable,
+ cursor_in_new,
+ CURSOR_MARKER,
+ multi_region::V0318_END_MARKER,
+ )
+ }
+ ZetaFormat::V0317SeedMultiRegions => {
+ let (new_editable, first_hunk_offset) =
+ udiff::apply_diff_to_string_with_hunk_offset(patch, &old_editable)?;
+ let cursor_in_new = cursor_in_new_text(cursor_offset, first_hunk_offset, &new_editable);
+ multi_region::encode_from_old_and_new_v0317(
+ &old_editable,
+ &new_editable,
+ cursor_in_new,
+ CURSOR_MARKER,
+ multi_region::V0317_END_MARKER,
+ )
+ }
+ // V0131-style formats and fallback: produce new editable text with
+ // cursor marker inserted, followed by the end marker.
+ _ => {
+ let (mut result, first_hunk_offset) = if empty_patch {
+ (old_editable.clone(), None)
+ } else {
+ udiff::apply_diff_to_string_with_hunk_offset(patch, &old_editable)?
+ };
+
+ if let Some(cursor) = cursor_offset {
+ let hunk_start = if !empty_patch {
+ first_hunk_offset.unwrap_or(0)
+ } else {
+ 0
+ };
+ let offset = (hunk_start + cursor).min(result.len());
+ result.insert_str(offset, CURSOR_MARKER);
+ }
+
+ if !result.is_empty() && !result.ends_with('\n') {
+ result.push('\n');
+ }
+
+ if let Some(end_marker) = output_end_marker_for_format(format) {
+ result.push_str(end_marker);
+ }
+
+ Ok(result)
+ }
+ }
+}
+
+/// Compute the cursor position within the new text after diff application.
+fn cursor_in_new_text(
+ cursor_offset: Option<usize>,
+ first_hunk_offset: Option<usize>,
+ new_text: &str,
+) -> Option<usize> {
+ cursor_offset.map(|cursor| {
+ let hunk_start = first_hunk_offset.unwrap_or(0);
+ (hunk_start + cursor).min(new_text.len())
+ })
+}
+
+#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct ParsedOutput {
/// Text that should replace the editable region
pub new_editable_region: String,
/// The byte range within `cursor_excerpt` that this replacement applies to
pub range_in_excerpt: Range<usize>,
+ /// Byte offset of the cursor marker within `new_editable_region`, if present
+ pub cursor_offset_in_new_editable_region: Option<usize>,
+}
+
+#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
+pub struct CursorPosition {
+ pub path: String,
+ pub row: usize,
+ pub column: usize,
+ pub offset: usize,
+ pub editable_region_offset: usize,
+}
+
+pub fn parsed_output_from_editable_region(
+ range_in_excerpt: Range<usize>,
+ mut new_editable_region: String,
+) -> ParsedOutput {
+ let cursor_offset_in_new_editable_region = new_editable_region.find(CURSOR_MARKER);
+ if let Some(offset) = cursor_offset_in_new_editable_region {
+ new_editable_region.replace_range(offset..offset + CURSOR_MARKER.len(), "");
+ }
+
+ ParsedOutput {
+ new_editable_region,
+ range_in_excerpt,
+ cursor_offset_in_new_editable_region,
+ }
}
/// Parse model output for the given zeta format
@@ -891,12 +1036,97 @@ pub fn parse_zeta2_model_output(
let range_in_excerpt =
range_in_context.start + context_start..range_in_context.end + context_start;
- Ok(ParsedOutput {
- new_editable_region: output,
- range_in_excerpt,
+ Ok(parsed_output_from_editable_region(range_in_excerpt, output))
+}
+
+pub fn parse_zeta2_model_output_as_patch(
+ output: &str,
+ format: ZetaFormat,
+ prompt_inputs: &ZetaPromptInput,
+) -> Result<String> {
+ let parsed = parse_zeta2_model_output(output, format, prompt_inputs)?;
+ parsed_output_to_patch(prompt_inputs, parsed)
+}
+
+pub fn cursor_position_from_parsed_output(
+ prompt_inputs: &ZetaPromptInput,
+ parsed: &ParsedOutput,
+) -> Option<CursorPosition> {
+ let cursor_offset = parsed.cursor_offset_in_new_editable_region?;
+ let editable_region_offset = parsed.range_in_excerpt.start;
+ let excerpt = prompt_inputs.cursor_excerpt.as_ref();
+
+ let editable_region_start_line = excerpt[..editable_region_offset].matches('\n').count();
+
+ let new_editable_region = &parsed.new_editable_region;
+ let prefix_end = cursor_offset.min(new_editable_region.len());
+ let new_region_prefix = &new_editable_region[..prefix_end];
+
+ let row = editable_region_start_line + new_region_prefix.matches('\n').count();
+
+ let column = match new_region_prefix.rfind('\n') {
+ Some(last_newline) => cursor_offset - last_newline - 1,
+ None => {
+ let content_prefix = &excerpt[..editable_region_offset];
+ let content_column = match content_prefix.rfind('\n') {
+ Some(last_newline) => editable_region_offset - last_newline - 1,
+ None => editable_region_offset,
+ };
+ content_column + cursor_offset
+ }
+ };
+
+ Some(CursorPosition {
+ path: prompt_inputs.cursor_path.to_string_lossy().into_owned(),
+ row,
+ column,
+ offset: editable_region_offset + cursor_offset,
+ editable_region_offset: cursor_offset,
})
}
+pub fn parsed_output_to_patch(
+ prompt_inputs: &ZetaPromptInput,
+ parsed: ParsedOutput,
+) -> Result<String> {
+ let range_in_excerpt = parsed.range_in_excerpt;
+ let excerpt = prompt_inputs.cursor_excerpt.as_ref();
+ let old_text = excerpt[range_in_excerpt.clone()].to_string();
+ let mut new_text = parsed.new_editable_region;
+
+ let mut old_text_normalized = old_text;
+ if !new_text.is_empty() && !new_text.ends_with('\n') {
+ new_text.push('\n');
+ }
+ if !old_text_normalized.is_empty() && !old_text_normalized.ends_with('\n') {
+ old_text_normalized.push('\n');
+ }
+
+ let editable_region_offset = range_in_excerpt.start;
+ let editable_region_start_line = excerpt[..editable_region_offset].matches('\n').count() as u32;
+ let editable_region_lines = old_text_normalized.lines().count() as u32;
+
+ let diff = udiff::unified_diff_with_context(
+ &old_text_normalized,
+ &new_text,
+ editable_region_start_line,
+ editable_region_start_line,
+ editable_region_lines,
+ );
+
+ let path = prompt_inputs
+ .cursor_path
+ .to_string_lossy()
+ .trim_start_matches('/')
+ .to_string();
+ let formatted_diff = format!("--- a/{path}\n+++ b/{path}\n{diff}");
+
+ Ok(udiff::encode_cursor_in_patch(
+ &formatted_diff,
+ parsed.cursor_offset_in_new_editable_region,
+ ))
+}
+
pub fn excerpt_range_for_format(
format: ZetaFormat,
ranges: &ExcerptRanges,
@@ -5292,6 +5522,33 @@ mod tests {
assert_eq!(apply_edit(excerpt, &output1), "new content\n");
}
+ #[test]
+ fn test_parsed_output_to_patch_round_trips_through_udiff_application() {
+ let excerpt = "before ctx\nctx start\neditable old\nctx end\nafter ctx\n";
+ let context_start = excerpt.find("ctx start").unwrap();
+ let context_end = excerpt.find("after ctx").unwrap();
+ let editable_start = excerpt.find("editable old").unwrap();
+ let editable_end = editable_start + "editable old\n".len();
+ let input = make_input_with_context_range(
+ excerpt,
+ editable_start..editable_end,
+ context_start..context_end,
+ editable_start,
+ );
+
+ let parsed = parse_zeta2_model_output(
+ "editable new\n>>>>>>> UPDATED\n",
+ ZetaFormat::V0131GitMergeMarkersPrefix,
+ &input,
+ )
+ .unwrap();
+ let expected = apply_edit(excerpt, &parsed);
+ let patch = parsed_output_to_patch(&input, parsed).unwrap();
+ let patched = udiff::apply_diff_to_string(&patch, excerpt).unwrap();
+
+ assert_eq!(patched, expected);
+ }
+
#[test]
fn test_special_tokens_not_triggered_by_comment_separator() {
// Regression test for https://github.com/zed-industries/zed/issues/52489
@@ -4,12 +4,15 @@ Welcome to Zed's documentation.
This is built on push to `main` and published automatically to [https://zed.dev/docs](https://zed.dev/docs).
-To preview the docs locally you will need to install [mdBook](https://rust-lang.github.io/mdBook/) (`cargo install mdbook@0.4.40`) and then run:
+To preview the docs locally you will need to install [mdBook](https://rust-lang.github.io/mdBook/) (`cargo install mdbook@0.4.40`), generate the action metadata, and then serve:
```sh
+script/generate-action-metadata
mdbook serve docs
```
+The first command dumps an action manifest to `crates/docs_preprocessor/actions.json`. Without it, the preprocessor cannot validate keybinding and action references in the docs and will report errors. You only need to re-run it when actions change.
+
It's important to note the version number above. For an unknown reason, as of 2025-04-23, running 0.4.48 will cause odd URL behavior that breaks things.
Before committing, verify that the docs are formatted in the way Prettier expects with:
@@ -67,7 +67,9 @@ Right-click on any agent response in the thread view to access a context menu wi
### Navigating the Thread {#navigating-the-thread}
-In long conversations, use the scroll arrow buttons at the bottom of the panel to jump to your most recent prompt or to the very beginning of the thread.
+In long conversations, use the scroll arrow buttons at the bottom of the panel to jump to your most recent prompt or to the very beginning of the thread. You can also scroll the thread using arrow keys, Page Up/Down, Home/End, and Shift+Page Up/Down to jump between messages, when the thread pane is focused.
+
+When focus is in the message editor, you can also use {#kb agent::ScrollOutputPageUp}, {#kb agent::ScrollOutputPageDown}, {#kb agent::ScrollOutputToTop}, {#kb agent::ScrollOutputToBottom}, {#kb agent::ScrollOutputLineUp}, and {#kb agent::ScrollOutputLineDown} to navigate the thread, or {#kb agent::ScrollOutputToPreviousMessage} and {#kb agent::ScrollOutputToNextMessage} to jump between your prompts.
### Navigating History {#navigating-history}
@@ -292,13 +292,16 @@ The default value is `false`.
### Sound Notification
-Control whether to hear a notification sound when the agent is done generating changes or needs your input.
-The default value is `false`.
+Control whether to hear a notification sound when the agent is done generating changes or needs your input. The default value is `never`.
+
+- `"never"` (default) — Never play the sound.
+- `"when_hidden"` — Only play the sound when the agent panel is not visible.
+- `"always"` — Always play the sound on completion.
```json [settings]
{
"agent": {
- "play_sound_when_agent_done": true
+ "play_sound_when_agent_done": "never"
}
}
```
@@ -173,8 +173,6 @@ git submodule add https://github.com/your-username/foobar-zed.git extensions/my-
git add extensions/my-extension
```
-> **Note:** Your extension must live under te
-
> All extension submodules must use HTTPS URLs and not SSH URLS (`git@github.com`).
2. Add a new entry to the top-level `extensions.toml` file containing your extension:
@@ -143,6 +143,21 @@ This query marks strings, object keys, and numbers for highlighting. The followi
| @variable.parameter | Captures function/method parameters |
| @variant | Captures variants |
+#### Fallback captures
+
+A single Tree-sitter pattern can specify multiple captures on the same node to define fallback highlights.
+Zed resolves them right-to-left: It first tries the rightmost capture, and if the current theme has no style for it, falls back to the next capture to the left, and so on.
+
+For example:
+
+```scheme
+(type_identifier) @type @variable
+```
+
+Here Zed will first try to resolve `@variable` from the theme. If the theme defines a style for `@variable`, that style is used. Otherwise, Zed falls back to `@type`.
+
+This is useful when a language wants to provide a preferred highlight that not all themes may support, while still falling back to a more common capture that most themes define.
+
### Bracket matching
The `brackets.scm` file defines matching brackets.
@@ -15,7 +15,7 @@ See [samply](https://github.com/mstange/samply)'s README on how to install and r
The profile.json does not contain any symbols. Firefox profiler can add the local symbols to the profile for for. To do that hit the upload local profile button in the top right corner.
-<img width="851" height="auto" alt="image" src="https://github.com/user-attachments/assets/cbef2b51-0442-4ee9-bc5c-95f6ccf9be2c" />
+<img width="851" height="auto" alt="image" src="https://github.com/user-attachments/assets/cbef2b51-0442-4ee9-bc5c-95f6ccf9be2c" style="display: block; margin: 0 auto;" />
# In depth CPU profiling (Tracing)
@@ -53,20 +53,40 @@ Download the profiler:
Open the profiler (tracy-profiler), you should see zed in the list of `Discovered clients` click it.
-<img width="392" height="auto" alt="image" src="https://github.com/user-attachments/assets/b6f06fc3-6b25-41c7-ade9-558cc93d6033" />
+<img width="392" height="auto" alt="image" src="https://github.com/user-attachments/assets/b6f06fc3-6b25-41c7-ade9-558cc93d6033" style="display: block; margin: 0 auto;"/>
Tracy is an incredibly powerful profiler which can do a lot however it's UI is not that friendly. This is not the place for an in depth guide to Tracy, I do however want to highlight one particular workflow that is helpful when figuring out why a piece of code is _sometimes_ slow.
Here are the steps:
1. Click the flamechart button at the top.
+
+<img width="1815" height="auto" alt="Click flamechart" src="https://github.com/user-attachments/assets/9b488c60-90fa-4013-a663-f4e35ea753d2" />
+
2. Click on a function that takes a lot of time.
+
+<img width="2001" height="auto" alt="Click snapshot" src="https://github.com/user-attachments/assets/ddb838ed-2c83-4dba-a750-b8a2d4ac6202" />
+
3. Expand the list of function calls by clicking on main thread.
+
+<img width="2313" height="auto" alt="Click main thread" src="https://github.com/user-attachments/assets/465dd883-9d3c-4384-a396-fce68b872d1a" />
+
4. Filter that list to the slower calls then click on one of the slow calls in the list
+
+<img width="2264" height="auto" alt="Select the tail calls in the histogram to filter down the list of calls then click on one call" src="https://github.com/user-attachments/assets/a8fddc7c-f40a-4f11-a648-ca7cc193ff6f" />
+
5. Click zoom to zone to go to that specific function call in the timeline
+
+<img width="1822" height="auto" alt="Click zoom to zone" src="https://github.com/user-attachments/assets/3391664d-7297-41d4-be17-ac9b2e2c85d1" />
+
6. Scroll to zoom in and see more detail about the callers
+
+<img width="1964" height="auto" alt="Scroll to zoom in" src="https://github.com/user-attachments/assets/625c2bf4-a68d-40c4-becb-ade16bc9a8bc" />
+
7. Click on a caller to to get statistics on _it_.
+<img width="1888" height="auto" alt="Click on any of the zones to get statistics" src="https://github.com/user-attachments/assets/7e578825-2b63-4b7f-88f7-0cb16b8a3387" />
+
While normally the blue bars in the Tracy timeline correspond to function calls they can time any part of a codebase. In the example below we have added an extra span "for block in edits" and added metadata to it: the block_height. You can do that like this:
```rust
@@ -74,14 +94,6 @@ let span = ztracing::debug_span!("for block in edits", block_height = block.heig
let _enter = span.enter(); // span guard, when this is dropped the span ends (and its duration is recorded)
```
-<img width="1815" height="auto" alt="Click flamechart" src="https://github.com/user-attachments/assets/9b488c60-90fa-4013-a663-f4e35ea753d2" />
-<img width="2001" height="auto" alt="Click snapshot" src="https://github.com/user-attachments/assets/ddb838ed-2c83-4dba-a750-b8a2d4ac6202" />
-<img width="2313" height="auto" alt="Click main thread" src="https://github.com/user-attachments/assets/465dd883-9d3c-4384-a396-fce68b872d1a" />
-<img width="2264" height="auto" alt="Select the tail calls in the histogram to filter down the list of calls then click on one call" src="https://github.com/user-attachments/assets/a8fddc7c-f40a-4f11-a648-ca7cc193ff6f" />
-<img width="1822" height="auto" alt="Click zoom to zone" src="https://github.com/user-attachments/assets/3391664d-7297-41d4-be17-ac9b2e2c85d1" />
-<img width="1964" height="auto" alt="Scroll to zoom in" src="https://github.com/user-attachments/assets/625c2bf4-a68d-40c4-becb-ade16bc9a8bc" />
-<img width="1888" height="auto" alt="Click on any of the zones to get statistics" src="https://github.com/user-attachments/assets/7e578825-2b63-4b7f-88f7-0cb16b8a3387" />
-
# Task/Async profiling
Get a profile of the zed foreground executor and background executors. Check if
@@ -3002,21 +3002,36 @@ If you wish to exclude certain hosts from using the proxy, set the `NO_PROXY` en
## Profiles
-- Description: Configuration profiles that can be applied on top of existing settings
+- Description: Configuration profiles that can be temporarily applied on top of existing settings or Zed's defaults.
- Setting: `profiles`
- Default: `{}`
**Options**
-Configuration object for defining settings profiles. Example:
+Each profile is an object with the following optional fields:
+
+- `base`: What settings to start from before applying the profile's overrides.
+ - `"user"` (default): Apply on top of your current user settings.
+ - `"default"`: Apply on top of Zed's default settings, ignoring user customizations.
+- `settings`: The settings overrides for this profile.
+
+Examples:
```json [settings]
{
"profiles": {
- "presentation": {
- "buffer_font_size": 20,
- "ui_font_size": 18,
- "theme": "One Light"
+ "Presentation": {
+ "settings": {
+ "buffer_font_size": 20,
+ "ui_font_size": 18,
+ "theme": "One Light"
+ }
+ },
+ "Clean Slate": {
+ "base": "default",
+ "settings": {
+ "theme": "Ayu Dark"
+ }
}
}
}
@@ -5332,12 +5347,12 @@ For example, to use `Nerd Font` as a fallback, add the following to your setting
## Settings Profiles
-- Description: Configure any number of settings profiles that are temporarily applied on top of your existing user settings when selected from `settings profile selector: toggle`.
+- Description: Configure any number of settings profiles that are temporarily applied when selected from `settings profile selector: toggle`.
- Setting: `profiles`
- Default: `{}`
In your `settings.json` file, add the `profiles` object.
-Each key within this object is the name of a settings profile, and each value is an object that can include any of Zed's settings.
+Each key within this object is the name of a settings profile. Each profile has an optional `base` field (`"user"` or `"default"`) and a `settings` object containing any of Zed's settings.
Example:
@@ -5345,24 +5360,30 @@ Example:
{
"profiles": {
"Presenting (Dark)": {
- "agent_buffer_font_size": 18.0,
- "buffer_font_size": 18.0,
- "theme": "One Dark",
- "ui_font_size": 18.0
+ "settings": {
+ "agent_buffer_font_size": 18.0,
+ "buffer_font_size": 18.0,
+ "theme": "One Dark",
+ "ui_font_size": 18.0
+ }
},
"Presenting (Light)": {
- "agent_buffer_font_size": 18.0,
- "buffer_font_size": 18.0,
- "theme": "One Light",
- "ui_font_size": 18.0
+ "settings": {
+ "agent_buffer_font_size": 18.0,
+ "buffer_font_size": 18.0,
+ "theme": "One Light",
+ "ui_font_size": 18.0
+ }
},
"Writing": {
- "agent_buffer_font_size": 15.0,
- "buffer_font_size": 15.0,
- "theme": "Catppuccin Frappé - No Italics",
- "ui_font_size": 15.0,
- "tab_bar": { "show": false },
- "toolbar": { "breadcrumbs": false }
+ "settings": {
+ "agent_buffer_font_size": 15.0,
+ "buffer_font_size": 15.0,
+ "theme": "Catppuccin Frappé - No Italics",
+ "ui_font_size": 15.0,
+ "tab_bar": { "show": false },
+ "toolbar": { "breadcrumbs": false }
+ }
}
}
}
@@ -53,9 +53,9 @@ Zed supports ways to spawn (and rerun) commands using its integrated [terminal](
"show_command": true,
// Which edited buffers to save before running the task:
// * `all` — save all edited buffers
- // * `current` — save current buffer only
+ // * `current` — save currently active buffer only
// * `none` — don't save any buffers
- "save": "all"
+ "save": "none"
// Represents the tags for inline runnable indicators, or spawning multiple tasks at once.
// "tags": []
}
@@ -45,13 +45,13 @@ Xcode Instruments (which comes bundled with your [Xcode](https://apps.apple.com/
1. With Zed running, open Instruments
1. Select `Time Profiler` as the profiling template
- 
+ 
1. In the `Time Profiler` configuration, set the target to the running Zed process
1. Start recording
- 
+ 
1. Perform the action in Zed that causes performance issues
1. Stop recording
- 
+ 
1. Save the trace file
1. Compress the trace file into a zip archive
1. File a [GitHub issue](https://github.com/zed-industries/zed/issues/new/choose) with the trace zip attached
@@ -562,6 +562,7 @@ You can change the following settings to modify vim mode's behavior:
| use_system_clipboard | Determines how system clipboard is used:<br><ul><li>"always": use for all operations</li><li>"never": only use when explicitly specified</li><li>"on_yank": use for yank operations</li></ul> | "always" |
| use_multiline_find | deprecated |
| use_smartcase_find | If `true`, `f` and `t` motions are case-insensitive when the target letter is lowercase. | false |
+| use_regex_search | If `true`, then vim search will use regex mode | true |
| gdefault | If `true`, the `:substitute` command replaces all matches in a line by default (as if `g` flag was given). The `g` flag then toggles this, replacing only the first match. | false |
| toggle_relative_line_numbers | If `true`, line numbers are relative in normal mode and absolute in insert mode, giving you the best of both options. | false |
| custom_digraphs | An object that allows you to add custom digraphs. Read below for an example. | {} |
@@ -587,6 +588,7 @@ Here's an example of these settings changed:
"default_mode": "insert",
"use_system_clipboard": "never",
"use_smartcase_find": true,
+ "use_regex_search": true,
"gdefault": true,
"toggle_relative_line_numbers": true,
"highlight_on_yank_duration": 50,
@@ -70,10 +70,21 @@ h5,
h6 {
position: relative;
font-family: var(--title-font);
- font-weight: 480;
+ font-weight: 400;
+}
+
+h1 {
color: var(--title-color);
}
+h2,
+h3,
+h4,
+h5,
+h6 {
+ color: var(--full-contrast);
+}
+
/* Don't change font size in headers. */
h1 code,
h2 code,
@@ -213,7 +224,7 @@ hr {
}
.content {
- padding: 48px 32px 0 32px;
+ padding: 32px 32px 0 32px;
display: flex;
justify-content: space-between;
gap: 36px;
@@ -272,10 +283,14 @@ hr {
border-radius: 8px;
overflow: clip;
}
-.content .header:link,
-.content .header:visited {
+.content h1 .header:link,
+.content h1 .header:visited {
color: var(--title-color);
}
+.content :is(h2, h3, h4, h5, h6) .header:link,
+.content :is(h2, h3, h4, h5, h6) .header:visited {
+ color: var(--full-contrast);
+}
.content .header:link,
.content .header:visited:hover {
text-decoration: none;
@@ -383,15 +398,17 @@ blockquote .warning:before {
}
kbd {
- background-color: rgba(8, 76, 207, 0.1);
+ background-color: var(--keybinding-bg);
+ padding: 4px 4px 6px 4px;
border-radius: 4px;
+ font-family: var(--mono-font);
+ display: inline-block;
+ margin: 0 2px;
border: solid 1px var(--popover-border);
box-shadow: inset 0 -1px 0 var(--theme-hover);
- display: inline-block;
font-size: var(--code-font-size);
- font-family: var(--mono-font);
+ color: var(--full-contrast);
line-height: 10px;
- padding: 4px 5px;
vertical-align: middle;
}
@@ -11,11 +11,12 @@
--page-padding: 15px;
--content-max-width: 690px;
--menu-bar-height: 64px;
- --font: "IA Writer Quattro S", sans-serif;
- --title-font: "Lora", "Helvetica Neue", Helvetica, Arial, sans-serif;
+ --font: "iA Writer Quattro S", sans-serif;
+ --title-font:
+ "IBM Plex Serif", "Helvetica Neue", Helvetica, Arial, sans-serif;
--mono-font:
- ui-monospace, SFMono-Regular, Menlo, Monaco, Consolas, Liberation Mono,
- Courier New, monospace;
+ "Lilex", ui-monospace, SFMono-Regular, Menlo, Monaco, Consolas,
+ Liberation Mono, Courier New, monospace;
--code-font-size: 0.875em
/* please adjust the ace font size accordingly in editor.js */;
@@ -151,7 +152,7 @@
--inline-code-color: hsl(40, 100%, 80%);
--code-text: hsl(220, 13%, 95%);
--code-bg: hsl(220, 93%, 50%, 0.2);
- --keybinding-bg: hsl(0, 0%, 12%);
+ --keybinding-bg: hsl(220, 20%, 10%);
--pre-bg: hsl(220, 13%, 5%);
--pre-border: hsla(220, 93%, 70%, 0.3);
@@ -162,7 +163,7 @@
--popover-shadow:
0 10px 15px -3px hsl(0, 0%, 0%, 0.1), 0 4px 6px -4px hsl(0, 0%, 0%, 0.1);
- --theme-hover: hsl(220, 13%, 25%);
+ --theme-hover: hsl(220, 13%, 20%);
--hover-section-title: hsl(220, 13%, 11%);
--quote-bg: hsl(220, 13%, 25%, 0.4);
@@ -3,15 +3,37 @@
/* open-sans-300 - latin_vietnamese_latin-ext_greek-ext_greek_cyrillic-ext_cyrillic */
@font-face {
- font-family: "IA Writer Quattro S";
+ font-family: "iA Writer Quattro S";
+ src: url("https://cdn.zed.dev/fonts/iAWriterQuattroV.woff2")
+ format("woff2-variations");
+ font-weight: 100 900;
font-style: normal;
- font-weight: 400;
- src: url("iAWriterQuattroS-Regular.woff2") format("woff2");
+ font-display: swap;
}
@font-face {
- font-family: "Lora";
- src: url("Lora.var.woff2") format("woff2-variations");
+ font-family: "iA Writer Quattro S";
+ src: url("https://cdn.zed.dev/fonts/iAWriterQuattroV-Italic.woff2")
+ format("woff2-variations");
font-weight: 100 900;
+ font-style: italic;
+ font-display: swap;
+}
+
+@font-face {
+ font-family: "IBM Plex Serif";
+ src: url("https://cdn.zed.dev/fonts/IBMPlexSerif-Var.woff2")
+ format("woff2-variations");
+ font-weight: 400 700;
+ font-style: normal;
+ font-display: swap;
+}
+
+@font-face {
+ font-family: "Lilex";
+ src: url("https://cdn.zed.dev/fonts/Lilex-Regular.woff2")
+ format("woff2-variations");
+ font-weight: 400;
font-style: normal;
+ font-display: swap;
}
@@ -5,7 +5,7 @@
display: flex;
flex-direction: column;
gap: 4px;
- padding: 28px 0 120px 0;
+ padding: 16px 0 120px 0;
width: 200px;
max-height: calc(100svh - 50px);
overflow-x: hidden;
@@ -1,8 +1,8 @@
kbd.keybinding {
background-color: var(--keybinding-bg);
- padding: 2px 4px;
- border-radius: 3px;
- font-family: monospace;
+ padding: 4px 4px 6px 4px;
+ border-radius: 4px;
+ font-family: var(--mono-font);
display: inline-block;
margin: 0 2px;
}
@@ -1,6 +1,6 @@
[package]
name = "zed_glsl"
-version = "0.2.2"
+version = "0.2.3"
edition.workspace = true
publish.workspace = true
license = "Apache-2.0"
@@ -1,7 +1,7 @@
id = "glsl"
name = "GLSL"
description = "GLSL support."
-version = "0.2.2"
+version = "0.2.3"
schema_version = 1
authors = ["Mikayla Maki <mikayla@zed.dev>"]
repository = "https://github.com/zed-industries/zed"
@@ -0,0 +1,2 @@
+((comment) @injection.content
+ (#set! injection.language "comment"))
@@ -1,6 +1,6 @@
[package]
name = "zed_proto"
-version = "0.3.1"
+version = "0.3.2"
edition.workspace = true
publish.workspace = true
license = "Apache-2.0"
@@ -1,7 +1,7 @@
id = "proto"
name = "Proto"
description = "Protocol Buffers support."
-version = "0.3.1"
+version = "0.3.2"
schema_version = 1
authors = ["Zed Industries <support@zed.dev>"]
repository = "https://github.com/zed-industries/zed"
@@ -0,0 +1,2 @@
+((comment) @injection.content
+ (#set! injection.language "comment"))
@@ -2,11 +2,11 @@
"nodes": {
"crane": {
"locked": {
- "lastModified": 1774313767,
- "narHash": "sha256-hy0XTQND6avzGEUFrJtYBBpFa/POiiaGBr2vpU6Y9tY=",
+ "lastModified": 1769737823,
+ "narHash": "sha256-DrBaNpZ+sJ4stXm+0nBX7zqZT9t9P22zbk6m5YhQxS4=",
"owner": "ipetkov",
"repo": "crane",
- "rev": "3d9df76e29656c679c744968b17fbaf28f0e923d",
+ "rev": "b2f45c3830aa96b7456a4c4bc327d04d7a43e1ba",
"type": "github"
},
"original": {
@@ -20,11 +20,11 @@
"nixpkgs-lib": "nixpkgs-lib"
},
"locked": {
- "lastModified": 1772408722,
- "narHash": "sha256-rHuJtdcOjK7rAHpHphUb1iCvgkU3GpfvicLMwwnfMT0=",
+ "lastModified": 1769996383,
+ "narHash": "sha256-AnYjnFWgS49RlqX7LrC4uA+sCCDBj0Ry/WOJ5XWAsa0=",
"owner": "hercules-ci",
"repo": "flake-parts",
- "rev": "f20dc5d9b8027381c474144ecabc9034d6a839a3",
+ "rev": "57928607ea566b5db3ad13af0e57e921e6b12381",
"type": "github"
},
"original": {
@@ -35,11 +35,11 @@
},
"nixpkgs": {
"locked": {
- "lastModified": 1774709303,
- "narHash": "sha256-D3Q07BbIA2KnTcSXIqqu9P586uWxN74zNoCH3h2ESHg=",
+ "lastModified": 1769789167,
+ "narHash": "sha256-kKB3bqYJU5nzYeIROI82Ef9VtTbu4uA3YydSk/Bioa8=",
"owner": "NixOS",
"repo": "nixpkgs",
- "rev": "8110df5ad7abf5d4c0f6fb0f8f978390e77f9685",
+ "rev": "62c8382960464ceb98ea593cb8321a2cf8f9e3e5",
"type": "github"
},
"original": {
@@ -51,11 +51,11 @@
},
"nixpkgs-lib": {
"locked": {
- "lastModified": 1772328832,
- "narHash": "sha256-e+/T/pmEkLP6BHhYjx6GmwP5ivonQQn0bJdH9YrRB+Q=",
+ "lastModified": 1769909678,
+ "narHash": "sha256-cBEymOf4/o3FD5AZnzC3J9hLbiZ+QDT/KDuyHXVJOpM=",
"owner": "nix-community",
"repo": "nixpkgs.lib",
- "rev": "c185c7a5e5dd8f9add5b2f8ebeff00888b070742",
+ "rev": "72716169fe93074c333e8d0173151350670b824c",
"type": "github"
},
"original": {
@@ -79,11 +79,11 @@
]
},
"locked": {
- "lastModified": 1774840424,
- "narHash": "sha256-3Oi4mBKzOCFQYLUyEjyc0s5cnlNj1MzmhpVKoLptpe8=",
+ "lastModified": 1775013181,
+ "narHash": "sha256-zPrt6oNM1r/RO5bWYaZ3hthfG9vzkr6kQdoqDd5x4Qw=",
"owner": "oxalica",
"repo": "rust-overlay",
- "rev": "d9f52b51548e76ab8b6e7d647763047ebdec835c",
+ "rev": "e8046c1d9ccadd497c2344d8fa49dab62f22f7be",
"type": "github"
},
"original": {
@@ -38,6 +38,8 @@
libxfixes,
libxkbcommon,
libxrandr,
+ libx11,
+ libxcb,
nodejs_22,
openssl,
perl,
@@ -181,8 +183,8 @@ let
wayland
gpu-lib
libglvnd
- xorg.libX11
- xorg.libxcb
+ libx11
+ libxcb
libdrm
libgbm
libva
@@ -37,6 +37,8 @@
libxfixes,
libxrandr,
libxtst,
+ libx11,
+ libxi,
pipewire,
xorg,
}:
@@ -224,8 +226,8 @@ stdenv.mkDerivation {
libxrandr
libxtst
pipewire
- xorg.libX11
- xorg.libXi
+ libx11
+ libxi
]);
preConfigure = ''
@@ -131,14 +131,14 @@ if [[ "$DRY_RUN" == "true" ]]; then
echo "Would auto-apply suggestions to docs via Droid and create a draft PR."
echo "Model: $MODEL"
echo ""
-
+
# Show each suggestion file
for file in $(echo "$MANIFEST" | jq -r '.suggestions[].file'); do
echo "--- $file ---"
git show "origin/$SUGGESTIONS_BRANCH:$file" 2>/dev/null || echo "(file not found)"
echo ""
done
-
+
echo -e "${YELLOW}=== END DRY RUN ===${NC}"
echo ""
echo "Run without --dry-run to create the PR."
@@ -213,7 +213,7 @@ fi
FLAGGED_PRS=()
FLAGS_FILE="$REPO_ROOT/crates/feature_flags/src/flags.rs"
if [[ -f "$FLAGS_FILE" ]]; then
- # Extract feature flag struct names (e.g. SubagentsFeatureFlag, GitGraphFeatureFlag)
+ # Extract feature flag struct names (e.g. SubagentsFeatureFlag)
FLAG_NAMES=$(grep -oE 'pub struct \w+FeatureFlag' "$FLAGS_FILE" | awk '{print $3}')
if [[ -n "$FLAG_NAMES" ]]; then
FLAG_PATTERN=$(echo "$FLAG_NAMES" | tr '\n' '|' | sed 's/|$//')
@@ -538,10 +538,10 @@ echo -e "${GREEN}PR created:${NC} $PR_URL"
if [[ "$KEEP_QUEUE" != "true" ]]; then
echo ""
echo "Resetting suggestions queue..."
-
+
git checkout --orphan "${SUGGESTIONS_BRANCH}-reset"
git rm -rf . > /dev/null 2>&1 || true
-
+
cat > README.md << 'EOF'
# Documentation Suggestions Queue
@@ -562,19 +562,19 @@ run `script/docs-suggest-publish` to create a documentation PR from these sugges
3. At preview release, suggestions are collected into a docs PR
4. After docs PR is created, this branch is reset
EOF
-
+
mkdir -p suggestions
echo '{"suggestions":[]}' > manifest.json
git add README.md suggestions manifest.json
git commit -m "Reset documentation suggestions queue
Previous suggestions published in: $PR_URL"
-
+
# Force push required: replacing the orphan suggestions branch with a clean slate
git push -f origin "${SUGGESTIONS_BRANCH}-reset:$SUGGESTIONS_BRANCH"
git checkout "$ORIGINAL_BRANCH"
git branch -D "${SUGGESTIONS_BRANCH}-reset"
-
+
echo "Suggestions queue reset."
else
git checkout "$ORIGINAL_BRANCH"
@@ -0,0 +1,38 @@
+[package]
+name = "compliance"
+version = "0.1.0"
+edition.workspace = true
+publish.workspace = true
+license = "GPL-3.0-or-later"
+
+[lints]
+workspace = true
+
+[features]
+octo-client = ["dep:octocrab", "dep:jsonwebtoken", "dep:futures", "dep:tokio"]
+
+[dependencies]
+anyhow.workspace = true
+async-trait.workspace = true
+derive_more.workspace = true
+futures = { workspace = true, optional = true }
+itertools.workspace = true
+jsonwebtoken = { version = "10.2", features = ["use_pem"], optional = true }
+octocrab = { version = "0.49", default-features = false, features = [
+ "default-client",
+ "jwt-aws-lc-rs",
+ "retry",
+ "rustls",
+ "rustls-aws-lc-rs",
+ "stream",
+ "timeout"
+], optional = true }
+regex.workspace = true
+semver.workspace = true
+serde.workspace = true
+serde_json.workspace = true
+tokio = { workspace = true, optional = true }
+
+[dev-dependencies]
+indoc.workspace = true
+tokio = { workspace = true, features = ["rt", "macros"] }
@@ -0,0 +1 @@
+../../LICENSE-GPL
@@ -0,0 +1,647 @@
+use std::{fmt, ops::Not as _};
+
+use itertools::Itertools as _;
+
+use crate::{
+ git::{CommitDetails, CommitList},
+ github::{
+ CommitAuthor, GitHubClient, GitHubUser, GithubLogin, PullRequestComment, PullRequestData,
+ PullRequestReview, ReviewState,
+ },
+ report::Report,
+};
+
+const ZED_ZIPPY_COMMENT_APPROVAL_PATTERN: &str = "@zed-zippy approve";
+const ZED_ZIPPY_GROUP_APPROVAL: &str = "@zed-industries/approved";
+
+#[derive(Debug)]
+pub enum ReviewSuccess {
+ ApprovingComment(Vec<PullRequestComment>),
+ CoAuthored(Vec<CommitAuthor>),
+ ExternalMergedContribution { merged_by: GitHubUser },
+ PullRequestReviewed(Vec<PullRequestReview>),
+}
+
+impl ReviewSuccess {
+ pub(crate) fn reviewers(&self) -> anyhow::Result<String> {
+ let reviewers = match self {
+ Self::CoAuthored(authors) => authors.iter().map(ToString::to_string).collect_vec(),
+ Self::PullRequestReviewed(reviews) => reviews
+ .iter()
+ .filter_map(|review| review.user.as_ref())
+ .map(|user| format!("@{}", user.login))
+ .collect_vec(),
+ Self::ApprovingComment(comments) => comments
+ .iter()
+ .map(|comment| format!("@{}", comment.user.login))
+ .collect_vec(),
+ Self::ExternalMergedContribution { merged_by } => {
+ vec![format!("@{}", merged_by.login)]
+ }
+ };
+
+ let reviewers = reviewers.into_iter().unique().collect_vec();
+
+ reviewers
+ .is_empty()
+ .not()
+ .then(|| reviewers.join(", "))
+ .ok_or_else(|| anyhow::anyhow!("Expected at least one reviewer"))
+ }
+}
+
+impl fmt::Display for ReviewSuccess {
+ fn fmt(&self, formatter: &mut fmt::Formatter<'_>) -> fmt::Result {
+ match self {
+ Self::CoAuthored(_) => formatter.write_str("Co-authored by an organization member"),
+ Self::PullRequestReviewed(_) => {
+ formatter.write_str("Approved by an organization review")
+ }
+ Self::ApprovingComment(_) => {
+ formatter.write_str("Approved by an organization approval comment")
+ }
+ Self::ExternalMergedContribution { .. } => {
+ formatter.write_str("External merged contribution")
+ }
+ }
+ }
+}
+
+#[derive(Debug)]
+pub enum ReviewFailure {
+ // todo: We could still query the GitHub API here to search for one
+ NoPullRequestFound,
+ Unreviewed,
+ UnableToDetermineReviewer,
+ Other(anyhow::Error),
+}
+
+impl fmt::Display for ReviewFailure {
+ fn fmt(&self, formatter: &mut fmt::Formatter<'_>) -> fmt::Result {
+ match self {
+ Self::NoPullRequestFound => formatter.write_str("No pull request found"),
+ Self::Unreviewed => formatter
+ .write_str("No qualifying organization approval found for the pull request"),
+ Self::UnableToDetermineReviewer => formatter.write_str("Could not determine reviewer"),
+ Self::Other(error) => write!(formatter, "Failed to inspect review state: {error}"),
+ }
+ }
+}
+
+pub(crate) type ReviewResult = Result<ReviewSuccess, ReviewFailure>;
+
+impl<E: Into<anyhow::Error>> From<E> for ReviewFailure {
+ fn from(err: E) -> Self {
+ Self::Other(anyhow::anyhow!(err))
+ }
+}
+
+pub struct Reporter<'a> {
+ commits: CommitList,
+ github_client: &'a GitHubClient,
+}
+
+impl<'a> Reporter<'a> {
+ pub fn new(commits: CommitList, github_client: &'a GitHubClient) -> Self {
+ Self {
+ commits,
+ github_client,
+ }
+ }
+
+ /// Method that checks every commit for compliance
+ async fn check_commit(&self, commit: &CommitDetails) -> Result<ReviewSuccess, ReviewFailure> {
+ let Some(pr_number) = commit.pr_number() else {
+ return Err(ReviewFailure::NoPullRequestFound);
+ };
+
+ let pull_request = self.github_client.get_pull_request(pr_number).await?;
+
+ if let Some(approval) = self.check_pull_request_approved(&pull_request).await? {
+ return Ok(approval);
+ }
+
+ if let Some(approval) = self
+ .check_approving_pull_request_comment(&pull_request)
+ .await?
+ {
+ return Ok(approval);
+ }
+
+ if let Some(approval) = self.check_commit_co_authors(commit).await? {
+ return Ok(approval);
+ }
+
+ // if let Some(approval) = self.check_external_merged_pr(pr_number).await? {
+ // return Ok(approval);
+ // }
+
+ Err(ReviewFailure::Unreviewed)
+ }
+
+ async fn check_commit_co_authors(
+ &self,
+ commit: &CommitDetails,
+ ) -> Result<Option<ReviewSuccess>, ReviewFailure> {
+ if commit.co_authors().is_some()
+ && let Some(commit_authors) = self
+ .github_client
+ .get_commit_authors([commit.sha()])
+ .await?
+ .get(commit.sha())
+ .and_then(|authors| authors.co_authors())
+ {
+ let mut org_co_authors = Vec::new();
+ for co_author in commit_authors {
+ if let Some(github_login) = co_author.user()
+ && self
+ .github_client
+ .check_org_membership(github_login)
+ .await?
+ {
+ org_co_authors.push(co_author.clone());
+ }
+ }
+
+ Ok(org_co_authors
+ .is_empty()
+ .not()
+ .then_some(ReviewSuccess::CoAuthored(org_co_authors)))
+ } else {
+ Ok(None)
+ }
+ }
+
+ #[allow(unused)]
+ async fn check_external_merged_pr(
+ &self,
+ pull_request: PullRequestData,
+ ) -> Result<Option<ReviewSuccess>, ReviewFailure> {
+ if let Some(user) = pull_request.user
+ && self
+ .github_client
+ .check_org_membership(&GithubLogin::new(user.login))
+ .await?
+ .not()
+ {
+ pull_request.merged_by.map_or(
+ Err(ReviewFailure::UnableToDetermineReviewer),
+ |merged_by| {
+ Ok(Some(ReviewSuccess::ExternalMergedContribution {
+ merged_by,
+ }))
+ },
+ )
+ } else {
+ Ok(None)
+ }
+ }
+
+ /// Looks for formal PR review approvals from org members.
+ ///
+ /// A review counts only when (a) the reviewer is not the PR author,
+ /// (b) the review state is `Approved`, and (c) the reviewer is an org
+ /// member. Returns `Ok(None)` when there are no reviews at all or no
+ /// review satisfies all three conditions.
+ async fn check_pull_request_approved(
+ &self,
+ pull_request: &PullRequestData,
+ ) -> Result<Option<ReviewSuccess>, ReviewFailure> {
+ let pr_reviews = self
+ .github_client
+ .get_pull_request_reviews(pull_request.number)
+ .await?;
+
+ if !pr_reviews.is_empty() {
+ let mut org_approving_reviews = Vec::new();
+ for review in pr_reviews {
+ // Self-approval is rejected: a PR author's own review never
+ // counts (see `pr_author_own_approval_review_is_rejected`).
+ if let Some(github_login) = review.user.as_ref()
+ && pull_request
+ .user
+ .as_ref()
+ .is_none_or(|pr_user| pr_user.login != github_login.login)
+ && review
+ .state
+ .is_some_and(|state| state == ReviewState::Approved)
+ && self
+ .github_client
+ .check_org_membership(&GithubLogin::new(github_login.login.clone()))
+ .await?
+ {
+ org_approving_reviews.push(review);
+ }
+ }
+
+ Ok(org_approving_reviews
+ .is_empty()
+ .not()
+ .then_some(ReviewSuccess::PullRequestReviewed(org_approving_reviews)))
+ } else {
+ Ok(None)
+ }
+ }
+
+ /// Looks for approval-by-comment: an org member (other than the PR
+ /// author) posting a comment containing one of the recognized approval
+ /// patterns (`ZED_ZIPPY_COMMENT_APPROVAL_PATTERN` or
+ /// `ZED_ZIPPY_GROUP_APPROVAL`).
+ ///
+ /// Returns `Ok(None)` when no comment qualifies.
+ async fn check_approving_pull_request_comment(
+ &self,
+ pull_request: &PullRequestData,
+ ) -> Result<Option<ReviewSuccess>, ReviewFailure> {
+ let other_comments = self
+ .github_client
+ .get_pull_request_comments(pull_request.number)
+ .await?;
+
+ if !other_comments.is_empty() {
+ let mut org_approving_comments = Vec::new();
+
+ for comment in other_comments {
+ // Note: unlike the review check (which uses `is_none_or`),
+ // this requires a known PR author — a comment on an
+ // authorless PR cannot count as approval here.
+ if pull_request
+ .user
+ .as_ref()
+ .is_some_and(|pr_author| pr_author.login != comment.user.login)
+ && comment.body.as_ref().is_some_and(|body| {
+ body.contains(ZED_ZIPPY_COMMENT_APPROVAL_PATTERN)
+ || body.contains(ZED_ZIPPY_GROUP_APPROVAL)
+ })
+ && self
+ .github_client
+ .check_org_membership(&GithubLogin::new(comment.user.login.clone()))
+ .await?
+ {
+ org_approving_comments.push(comment);
+ }
+ }
+
+ Ok(org_approving_comments
+ .is_empty()
+ .not()
+ .then_some(ReviewSuccess::ApprovingComment(org_approving_comments)))
+ } else {
+ Ok(None)
+ }
+ }
+
+ /// Runs the review checks over every commit held by this reporter and
+ /// collects the per-commit results into a `Report`.
+ ///
+ /// Consumes `self`; progress and failures are logged to stdout as each
+ /// commit is processed.
+ pub async fn generate_report(mut self) -> anyhow::Result<Report> {
+ let mut report = Report::new();
+
+ // Take ownership of the commit list so we can iterate by value while
+ // still borrowing `self` for `check_commit`.
+ let commits_to_check = std::mem::take(&mut self.commits);
+ let total_commits = commits_to_check.len();
+
+ for (i, commit) in commits_to_check.into_iter().enumerate() {
+ println!(
+ "Checking commit {:?} ({current}/{total})",
+ commit.sha().short(),
+ current = i + 1,
+ total = total_commits
+ );
+
+ let review_result = self.check_commit(&commit).await;
+
+ if let Err(err) = &review_result {
+ println!("Commit {:?} failed review: {:?}", commit.sha().short(), err);
+ }
+
+ // Failures are recorded in the report, not propagated — a single
+ // unreviewed commit must not abort the whole report.
+ report.add(commit, review_result);
+ }
+
+ Ok(report)
+ }
+}
+
+#[cfg(test)]
+mod tests {
+ use std::rc::Rc;
+ use std::str::FromStr;
+
+ use crate::git::{CommitDetails, CommitList, CommitSha};
+ use crate::github::{
+ AuthorsForCommits, GitHubApiClient, GitHubClient, GitHubUser, GithubLogin,
+ PullRequestComment, PullRequestData, PullRequestReview, ReviewState,
+ };
+
+ use super::{Reporter, ReviewFailure, ReviewSuccess};
+
+ struct MockGitHubApi {
+ pull_request: PullRequestData,
+ reviews: Vec<PullRequestReview>,
+ comments: Vec<PullRequestComment>,
+ commit_authors_json: serde_json::Value,
+ org_members: Vec<String>,
+ }
+
+ #[async_trait::async_trait(?Send)]
+ impl GitHubApiClient for MockGitHubApi {
+ async fn get_pull_request(&self, _pr_number: u64) -> anyhow::Result<PullRequestData> {
+ Ok(self.pull_request.clone())
+ }
+
+ async fn get_pull_request_reviews(
+ &self,
+ _pr_number: u64,
+ ) -> anyhow::Result<Vec<PullRequestReview>> {
+ Ok(self.reviews.clone())
+ }
+
+ async fn get_pull_request_comments(
+ &self,
+ _pr_number: u64,
+ ) -> anyhow::Result<Vec<PullRequestComment>> {
+ Ok(self.comments.clone())
+ }
+
+ async fn get_commit_authors(
+ &self,
+ _commit_shas: &[&CommitSha],
+ ) -> anyhow::Result<AuthorsForCommits> {
+ serde_json::from_value(self.commit_authors_json.clone()).map_err(Into::into)
+ }
+
+ async fn check_org_membership(&self, login: &GithubLogin) -> anyhow::Result<bool> {
+ Ok(self
+ .org_members
+ .iter()
+ .any(|member| member == login.as_str()))
+ }
+
+ async fn ensure_pull_request_has_label(
+ &self,
+ _label: &str,
+ _pr_number: u64,
+ ) -> anyhow::Result<()> {
+ Ok(())
+ }
+ }
+
+ fn make_commit(
+ sha: &str,
+ author_name: &str,
+ author_email: &str,
+ title: &str,
+ body: &str,
+ ) -> CommitDetails {
+ let formatted = format!(
+ "{sha}|field-delimiter|{author_name}|field-delimiter|{author_email}|field-delimiter|\
+ {title}|body-delimiter|{body}|commit-delimiter|"
+ );
+ CommitList::from_str(&formatted)
+ .expect("test commit should parse")
+ .into_iter()
+ .next()
+ .expect("should have one commit")
+ }
+
+ fn review(login: &str, state: ReviewState) -> PullRequestReview {
+ PullRequestReview {
+ user: Some(GitHubUser {
+ login: login.to_owned(),
+ }),
+ state: Some(state),
+ }
+ }
+
+ fn comment(login: &str, body: &str) -> PullRequestComment {
+ PullRequestComment {
+ user: GitHubUser {
+ login: login.to_owned(),
+ },
+ body: Some(body.to_owned()),
+ }
+ }
+
+ struct TestScenario {
+ pull_request: PullRequestData,
+ reviews: Vec<PullRequestReview>,
+ comments: Vec<PullRequestComment>,
+ commit_authors_json: serde_json::Value,
+ org_members: Vec<String>,
+ commit: CommitDetails,
+ }
+
+ impl TestScenario {
+ fn single_commit() -> Self {
+ Self {
+ pull_request: PullRequestData {
+ number: 1234,
+ user: Some(GitHubUser {
+ login: "alice".to_owned(),
+ }),
+ merged_by: None,
+ },
+ reviews: vec![],
+ comments: vec![],
+ commit_authors_json: serde_json::json!({}),
+ org_members: vec![],
+ commit: make_commit(
+ "abc12345abc12345",
+ "Alice",
+ "alice@test.com",
+ "Fix thing (#1234)",
+ "",
+ ),
+ }
+ }
+
+ fn with_reviews(mut self, reviews: Vec<PullRequestReview>) -> Self {
+ self.reviews = reviews;
+ self
+ }
+
+ fn with_comments(mut self, comments: Vec<PullRequestComment>) -> Self {
+ self.comments = comments;
+ self
+ }
+
+ fn with_org_members(mut self, members: Vec<&str>) -> Self {
+ self.org_members = members.into_iter().map(str::to_owned).collect();
+ self
+ }
+
+ fn with_commit_authors_json(mut self, json: serde_json::Value) -> Self {
+ self.commit_authors_json = json;
+ self
+ }
+
+ fn with_commit(mut self, commit: CommitDetails) -> Self {
+ self.commit = commit;
+ self
+ }
+
+ async fn run_scenario(self) -> Result<ReviewSuccess, ReviewFailure> {
+ let mock = MockGitHubApi {
+ pull_request: self.pull_request,
+ reviews: self.reviews,
+ comments: self.comments,
+ commit_authors_json: self.commit_authors_json,
+ org_members: self.org_members,
+ };
+ let client = GitHubClient::new(Rc::new(mock));
+ let reporter = Reporter::new(CommitList::default(), &client);
+ reporter.check_commit(&self.commit).await
+ }
+ }
+
+ #[tokio::test]
+ async fn approved_review_by_org_member_succeeds() {
+ let result = TestScenario::single_commit()
+ .with_reviews(vec![review("bob", ReviewState::Approved)])
+ .with_org_members(vec!["bob"])
+ .run_scenario()
+ .await;
+ assert!(matches!(result, Ok(ReviewSuccess::PullRequestReviewed(_))));
+ }
+
+ #[tokio::test]
+ async fn non_approved_review_state_is_not_accepted() {
+ let result = TestScenario::single_commit()
+ .with_reviews(vec![review("bob", ReviewState::Other)])
+ .with_org_members(vec!["bob"])
+ .run_scenario()
+ .await;
+ assert!(matches!(result, Err(ReviewFailure::Unreviewed)));
+ }
+
+ #[tokio::test]
+ async fn review_by_non_org_member_is_not_accepted() {
+ let result = TestScenario::single_commit()
+ .with_reviews(vec![review("bob", ReviewState::Approved)])
+ .run_scenario()
+ .await;
+ assert!(matches!(result, Err(ReviewFailure::Unreviewed)));
+ }
+
+ #[tokio::test]
+ async fn pr_author_own_approval_review_is_rejected() {
+ let result = TestScenario::single_commit()
+ .with_reviews(vec![review("alice", ReviewState::Approved)])
+ .with_org_members(vec!["alice"])
+ .run_scenario()
+ .await;
+ assert!(matches!(result, Err(ReviewFailure::Unreviewed)));
+ }
+
+ #[tokio::test]
+ async fn pr_author_own_approval_comment_is_rejected() {
+ let result = TestScenario::single_commit()
+ .with_comments(vec![comment("alice", "@zed-zippy approve")])
+ .with_org_members(vec!["alice"])
+ .run_scenario()
+ .await;
+ assert!(matches!(result, Err(ReviewFailure::Unreviewed)));
+ }
+
+ #[tokio::test]
+ async fn approval_comment_by_org_member_succeeds() {
+ let result = TestScenario::single_commit()
+ .with_comments(vec![comment("bob", "@zed-zippy approve")])
+ .with_org_members(vec!["bob"])
+ .run_scenario()
+ .await;
+ assert!(matches!(result, Ok(ReviewSuccess::ApprovingComment(_))));
+ }
+
+ #[tokio::test]
+ async fn group_approval_comment_by_org_member_succeeds() {
+ let result = TestScenario::single_commit()
+ .with_comments(vec![comment("bob", "@zed-industries/approved")])
+ .with_org_members(vec!["bob"])
+ .run_scenario()
+ .await;
+ assert!(matches!(result, Ok(ReviewSuccess::ApprovingComment(_))));
+ }
+
+ #[tokio::test]
+ async fn comment_without_approval_pattern_is_not_accepted() {
+ let result = TestScenario::single_commit()
+ .with_comments(vec![comment("bob", "looks good")])
+ .with_org_members(vec!["bob"])
+ .run_scenario()
+ .await;
+ assert!(matches!(result, Err(ReviewFailure::Unreviewed)));
+ }
+
+ #[tokio::test]
+ async fn commit_without_pr_number_is_no_pr_found() {
+ let result = TestScenario::single_commit()
+ .with_commit(make_commit(
+ "abc12345abc12345",
+ "Alice",
+ "alice@test.com",
+ "Fix thing without PR number",
+ "",
+ ))
+ .run_scenario()
+ .await;
+ assert!(matches!(result, Err(ReviewFailure::NoPullRequestFound)));
+ }
+
+ #[tokio::test]
+ async fn pr_review_takes_precedence_over_comment() {
+ let result = TestScenario::single_commit()
+ .with_reviews(vec![review("bob", ReviewState::Approved)])
+ .with_comments(vec![comment("charlie", "@zed-zippy approve")])
+ .with_org_members(vec!["bob", "charlie"])
+ .run_scenario()
+ .await;
+ assert!(matches!(result, Ok(ReviewSuccess::PullRequestReviewed(_))));
+ }
+
+ #[tokio::test]
+ async fn comment_takes_precedence_over_co_author() {
+ let result = TestScenario::single_commit()
+ .with_comments(vec![comment("bob", "@zed-zippy approve")])
+ .with_commit_authors_json(serde_json::json!({
+ "abc12345abc12345": {
+ "author": {
+ "name": "Alice",
+ "email": "alice@test.com",
+ "user": { "login": "alice" }
+ },
+ "authors": [{
+ "name": "Charlie",
+ "email": "charlie@test.com",
+ "user": { "login": "charlie" }
+ }]
+ }
+ }))
+ .with_commit(make_commit(
+ "abc12345abc12345",
+ "Alice",
+ "alice@test.com",
+ "Fix thing (#1234)",
+ "Co-authored-by: Charlie <charlie@test.com>",
+ ))
+ .with_org_members(vec!["bob", "charlie"])
+ .run_scenario()
+ .await;
+ assert!(matches!(result, Ok(ReviewSuccess::ApprovingComment(_))));
+ }
+
+ #[tokio::test]
+ async fn co_author_org_member_succeeds() {
+ let result = TestScenario::single_commit()
+ .with_commit_authors_json(serde_json::json!({
+ "abc12345abc12345": {
+ "author": {
+ "name": "Alice",
+ "email": "alice@test.com",
+ "user": { "login": "alice" }
+ },
+ "authors": [{
+ "name": "Bob",
+ "email": "bob@test.com",
+ "user": { "login": "bob" }
+ }]
+ }
+ }))
+ .with_commit(make_commit(
+ "abc12345abc12345",
+ "Alice",
+ "alice@test.com",
+ "Fix thing (#1234)",
+ "Co-authored-by: Bob <bob@test.com>",
+ ))
+ .with_org_members(vec!["bob"])
+ .run_scenario()
+ .await;
+ assert!(matches!(result, Ok(ReviewSuccess::CoAuthored(_))));
+ }
+
+ #[tokio::test]
+ async fn no_reviews_no_comments_no_coauthors_is_unreviewed() {
+ let result = TestScenario::single_commit().run_scenario().await;
+ assert!(matches!(result, Err(ReviewFailure::Unreviewed)));
+ }
+}
@@ -0,0 +1,591 @@
+#![allow(clippy::disallowed_methods, reason = "This is only used in xtasks")]
+use std::{
+ fmt::{self, Debug},
+ ops::Not,
+ process::Command,
+ str::FromStr,
+ sync::LazyLock,
+};
+
+use anyhow::{Context, Result, anyhow};
+use derive_more::{Deref, DerefMut, FromStr};
+
+use itertools::Itertools;
+use regex::Regex;
+use semver::Version;
+use serde::Deserialize;
+
+pub trait Subcommand {
+ type ParsedOutput: FromStr<Err = anyhow::Error>;
+
+ fn args(&self) -> impl IntoIterator<Item = String>;
+}
+
+#[derive(Deref, DerefMut)]
+pub struct GitCommand<G: Subcommand> {
+ #[deref]
+ #[deref_mut]
+ subcommand: G,
+}
+
+impl<G: Subcommand> GitCommand<G> {
+ /// Spawns `git` with the subcommand's arguments and parses its stdout
+ /// into the subcommand's `ParsedOutput` type.
+ #[must_use]
+ pub fn run(subcommand: G) -> Result<G::ParsedOutput> {
+ Self { subcommand }.run_impl()
+ }
+
+ /// Executes the command, returning an error when spawning fails, the
+ /// process exits non-zero (stderr is included in the message), stdout is
+ /// not valid UTF-8, or output parsing fails.
+ fn run_impl(self) -> Result<G::ParsedOutput> {
+ let command_output = Command::new("git")
+ .args(self.subcommand.args())
+ .output()
+ .context("Failed to spawn command")?;
+
+ if command_output.status.success() {
+ String::from_utf8(command_output.stdout)
+ .map_err(|_| anyhow!("Invalid UTF8"))
+ .and_then(|s| {
+ // `trim` strips the trailing newline git appends, so
+ // parsers see only the payload.
+ G::ParsedOutput::from_str(s.trim())
+ .map_err(|e| anyhow!("Failed to parse from string: {e:?}"))
+ })
+ } else {
+ anyhow::bail!(
+ "Command failed with exit code {}, stderr: {}",
+ command_output.status.code().unwrap_or_default(),
+ String::from_utf8(command_output.stderr).unwrap_or_default()
+ )
+ }
+ }
+}
+
+#[derive(Copy, Clone, Debug, PartialEq, Eq, PartialOrd, Ord)]
+pub enum ReleaseChannel {
+ Stable,
+ Preview,
+}
+
+impl ReleaseChannel {
+ pub(crate) fn tag_suffix(&self) -> &'static str {
+ match self {
+ ReleaseChannel::Stable => "",
+ ReleaseChannel::Preview => "-pre",
+ }
+ }
+}
+
+#[derive(Debug, Clone)]
+pub struct VersionTag(Version, ReleaseChannel);
+
+impl VersionTag {
+ pub fn parse(input: &str) -> Result<Self, anyhow::Error> {
+ // Being a bit more lenient for human inputs
+ let version = input.strip_prefix('v').unwrap_or(input);
+
+ let (version_str, channel) = version
+ .strip_suffix("-pre")
+ .map_or((version, ReleaseChannel::Stable), |version_str| {
+ (version_str, ReleaseChannel::Preview)
+ });
+
+ Version::parse(version_str)
+ .map(|version| Self(version, channel))
+ .map_err(|_| anyhow::anyhow!("Failed to parse version from tag!"))
+ }
+
+ pub fn version(&self) -> &Version {
+ &self.0
+ }
+}
+
+// `Display` is the idiomatic trait to implement (clippy flags direct
+// `ToString` impls); existing `.to_string()` callers keep working via the
+// blanket `impl<T: Display> ToString for T`, and the tag additionally
+// becomes usable directly in `format!`/`write!`.
+impl fmt::Display for VersionTag {
+ /// Renders the tag as `v{version}` plus the channel suffix
+ /// (`""` for stable, `"-pre"` for preview) — the inverse of `parse`.
+ fn fmt(&self, formatter: &mut fmt::Formatter<'_>) -> fmt::Result {
+ write!(
+ formatter,
+ "v{version}{channel_suffix}",
+ version = self.0,
+ channel_suffix = self.1.tag_suffix()
+ )
+ }
+}
+
+#[derive(Debug, Deref, FromStr, PartialEq, Eq, Hash, Deserialize)]
+pub struct CommitSha(pub(crate) String);
+
+impl CommitSha {
+ /// Returns the abbreviated (first 8 bytes) form of the SHA for display.
+ ///
+ /// Uses a checked slice so that a SHA shorter than 8 bytes — possible for
+ /// hand-written fixtures such as `"sha111"` in this crate's tests — falls
+ /// back to the full string instead of panicking (`split_at(8)` would
+ /// panic on out-of-bounds).
+ pub fn short(&self) -> &str {
+ self.0.get(..8).unwrap_or(&self.0)
+ }
+}
+
+#[derive(Debug)]
+pub struct CommitDetails {
+ sha: CommitSha,
+ author: Committer,
+ title: String,
+ body: String,
+}
+
+#[derive(Debug, Clone, PartialEq, Eq)]
+pub struct Committer {
+ name: String,
+ email: String,
+}
+
+impl Committer {
+ pub fn new(name: &str, email: &str) -> Self {
+ Self {
+ name: name.to_owned(),
+ email: email.to_owned(),
+ }
+ }
+}
+
+impl fmt::Display for Committer {
+ fn fmt(&self, formatter: &mut fmt::Formatter<'_>) -> fmt::Result {
+ write!(formatter, "{} ({})", self.name, self.email)
+ }
+}
+
+impl CommitDetails {
+ /// Separates a commit's subject line from its body in the log output.
+ const BODY_DELIMITER: &str = "|body-delimiter|";
+ /// Separates individual commit records in the log output.
+ const COMMIT_DELIMITER: &str = "|commit-delimiter|";
+ /// Separates the fields: sha, author name, author email, title.
+ const FIELD_DELIMITER: &str = "|field-delimiter|";
+ /// `git log --pretty=format:` string producing the delimited records above.
+ const FORMAT_STRING: &str = "%H|field-delimiter|%an|field-delimiter|%ae|field-delimiter|%s|body-delimiter|%b|commit-delimiter|";
+
+ /// Parses a single delimited header `line` plus its commit `body` into a
+ /// `CommitDetails`; errors when the line does not contain exactly four
+ /// delimited fields.
+ fn parse(line: &str, body: &str) -> Result<Self, anyhow::Error> {
+ let Some([sha, author_name, author_email, title]) =
+ line.splitn(4, Self::FIELD_DELIMITER).collect_array()
+ else {
+ return Err(anyhow!("Failed to parse commit fields from input {line}"));
+ };
+
+ Ok(CommitDetails {
+ sha: CommitSha(sha.to_owned()),
+ author: Committer::new(author_name, author_email),
+ title: title.to_owned(),
+ body: body.to_owned(),
+ })
+ }
+
+ /// Extracts the pull-request number from the last ` (#NNN)` occurrence in
+ /// the title, or `None` when the title carries no PR reference.
+ pub fn pr_number(&self) -> Option<u64> {
+ // Since we use squash merge, all commit titles end with the '(#12345)' pattern.
+ // While we could strictly speaking index into this directly, go for a slightly
+ // less prone approach to errors
+ const PATTERN: &str = " (#";
+ self.title
+ .rfind(PATTERN)
+ .and_then(|location| {
+ self.title[location..]
+ .find(')')
+ .map(|relative_end| location + PATTERN.len()..location + relative_end)
+ })
+ .and_then(|range| self.title[range].parse().ok())
+ }
+
+ /// Parses `Co-authored-by: Name <email>` trailers out of the commit body.
+ /// Returns `None` when the body contains no such trailers.
+ pub(crate) fn co_authors(&self) -> Option<Vec<Committer>> {
+ static CO_AUTHOR_REGEX: LazyLock<Regex> =
+ LazyLock::new(|| Regex::new(r"Co-authored-by: (.+) <(.+)>").unwrap());
+
+ // `captures_iter` borrows the body directly; the previous
+ // `&self.body.as_ref()` double-borrow was redundant.
+ let co_authors: Vec<Committer> = CO_AUTHOR_REGEX
+ .captures_iter(&self.body)
+ .filter_map(|cap| {
+ cap.get(1)
+ .map(|m| m.as_str())
+ .zip(cap.get(2).map(|m| m.as_str()))
+ })
+ .map(|(name, email)| Committer::new(name, email))
+ .collect();
+
+ co_authors.is_empty().not().then_some(co_authors)
+ }
+
+ pub(crate) fn author(&self) -> &Committer {
+ &self.author
+ }
+
+ pub(crate) fn title(&self) -> &str {
+ &self.title
+ }
+
+ pub(crate) fn sha(&self) -> &CommitSha {
+ &self.sha
+ }
+}
+
+#[derive(Debug, Deref, Default, DerefMut)]
+pub struct CommitList(Vec<CommitDetails>);
+
+impl CommitList {
+ pub fn range(&self) -> Option<String> {
+ self.0
+ .first()
+ .zip(self.0.last())
+ .map(|(first, last)| format!("{}..{}", first.sha().0, last.sha().0))
+ }
+}
+
+impl IntoIterator for CommitList {
+ type IntoIter = std::vec::IntoIter<CommitDetails>;
+ type Item = CommitDetails;
+
+ fn into_iter(self) -> std::vec::IntoIter<Self::Item> {
+ self.0.into_iter()
+ }
+}
+
+impl FromStr for CommitList {
+ type Err = anyhow::Error;
+
+ /// Parses the delimited output of `git log --pretty=format:FORMAT_STRING`.
+ ///
+ /// Malformed records now surface as `Err` instead of panicking: this
+ /// `FromStr` is the designated fallible parsing entry point (the previous
+ /// `expect` calls aborted the process on unexpected git output even
+ /// though `GitCommand::run_impl` already reports parse errors cleanly).
+ fn from_str(input: &str) -> Result<Self, Self::Err> {
+ input
+ .split(CommitDetails::COMMIT_DELIMITER)
+ .filter(|commit_details| !commit_details.is_empty())
+ .map(|commit_details| {
+ let (line, body) = commit_details
+ .trim()
+ .split_once(CommitDetails::BODY_DELIMITER)
+ .with_context(|| format!("Missing body delimiter in {commit_details:?}"))?;
+ CommitDetails::parse(line, body)
+ })
+ .collect::<Result<Vec<_>, _>>()
+ .map(CommitList)
+ }
+}
+
+pub struct GetVersionTags;
+
+impl Subcommand for GetVersionTags {
+ type ParsedOutput = VersionTagList;
+
+ fn args(&self) -> impl IntoIterator<Item = String> {
+ ["tag", "-l", "v*"].map(ToOwned::to_owned)
+ }
+}
+
+pub struct VersionTagList(Vec<VersionTag>);
+
+impl VersionTagList {
+ /// Sorts the tags ascending by semantic version (consuming builder
+ /// style). Call this before `find_previous_minor_version`.
+ pub fn sorted(mut self) -> Self {
+ self.0.sort_by(|a, b| a.version().cmp(b.version()));
+ self
+ }
+
+ /// Finds the newest `x.y.0` tag whose minor (or major) version is
+ /// strictly below `version_tag`'s.
+ ///
+ /// NOTE(review): the `take_while` assumes the list is sorted ascending
+ /// (i.e. `sorted()` was called first) — on an unsorted list candidates
+ /// after the first out-of-order entry are silently ignored; confirm all
+ /// call sites sort first.
+ pub fn find_previous_minor_version(&self, version_tag: &VersionTag) -> Option<&VersionTag> {
+ self.0
+ .iter()
+ .take_while(|tag| tag.version() < version_tag.version())
+ .collect_vec()
+ .into_iter()
+ .rev()
+ .find(|tag| {
+ (tag.version().major < version_tag.version().major
+ || (tag.version().major == version_tag.version().major
+ && tag.version().minor < version_tag.version().minor))
+ && tag.version().patch == 0
+ })
+ }
+}
+
+impl FromStr for VersionTagList {
+ type Err = anyhow::Error;
+
+ fn from_str(s: &str) -> Result<Self, Self::Err> {
+ let version_tags = s.lines().flat_map(VersionTag::parse).collect_vec();
+
+ version_tags
+ .is_empty()
+ .not()
+ .then_some(Self(version_tags))
+ .ok_or_else(|| anyhow::anyhow!("No version tags found"))
+ }
+}
+
+pub struct CommitsFromVersionToHead {
+ version_tag: VersionTag,
+ branch: String,
+}
+
+impl CommitsFromVersionToHead {
+ pub fn new(version_tag: VersionTag, branch: String) -> Self {
+ Self {
+ version_tag,
+ branch,
+ }
+ }
+}
+
+impl Subcommand for CommitsFromVersionToHead {
+ type ParsedOutput = CommitList;
+
+ fn args(&self) -> impl IntoIterator<Item = String> {
+ [
+ "log".to_string(),
+ format!("--pretty=format:{}", CommitDetails::FORMAT_STRING),
+ format!(
+ "{version}..{branch}",
+ version = self.version_tag.to_string(),
+ branch = self.branch
+ ),
+ ]
+ }
+}
+
+pub struct NoOutput;
+
+impl FromStr for NoOutput {
+ type Err = anyhow::Error;
+
+ fn from_str(_: &str) -> Result<Self, Self::Err> {
+ Ok(NoOutput)
+ }
+}
+
+#[cfg(test)]
+mod tests {
+ use super::*;
+ use indoc::indoc;
+
+ #[test]
+ fn parse_stable_version_tag() {
+ let tag = VersionTag::parse("v0.172.8").unwrap();
+ assert_eq!(tag.version().major, 0);
+ assert_eq!(tag.version().minor, 172);
+ assert_eq!(tag.version().patch, 8);
+ assert_eq!(tag.1, ReleaseChannel::Stable);
+ }
+
+ #[test]
+ fn parse_preview_version_tag() {
+ let tag = VersionTag::parse("v0.172.1-pre").unwrap();
+ assert_eq!(tag.version().major, 0);
+ assert_eq!(tag.version().minor, 172);
+ assert_eq!(tag.version().patch, 1);
+ assert_eq!(tag.1, ReleaseChannel::Preview);
+ }
+
+ #[test]
+ fn parse_version_tag_without_v_prefix() {
+ let tag = VersionTag::parse("0.172.8").unwrap();
+ assert_eq!(tag.version().major, 0);
+ assert_eq!(tag.version().minor, 172);
+ assert_eq!(tag.version().patch, 8);
+ }
+
+ #[test]
+ fn parse_invalid_version_tag() {
+ let result = VersionTag::parse("vConradTest");
+ assert!(result.is_err());
+ }
+
+ #[test]
+ fn version_tag_stable_roundtrip() {
+ let tag = VersionTag::parse("v0.172.8").unwrap();
+ assert_eq!(tag.to_string(), "v0.172.8");
+ }
+
+ #[test]
+ fn version_tag_preview_roundtrip() {
+ let tag = VersionTag::parse("v0.172.1-pre").unwrap();
+ assert_eq!(tag.to_string(), "v0.172.1-pre");
+ }
+
+ #[test]
+ fn sorted_orders_by_semver() {
+ let input = indoc! {"
+ v0.172.8
+ v0.170.1
+ v0.171.4
+ v0.170.2
+ v0.172.11
+ v0.171.3
+ v0.172.9
+ "};
+ let list = VersionTagList::from_str(input).unwrap().sorted();
+ for window in list.0.windows(2) {
+ assert!(
+ window[0].version() <= window[1].version(),
+ "{} should come before {}",
+ window[0].to_string(),
+ window[1].to_string()
+ );
+ }
+ assert_eq!(list.0[0].to_string(), "v0.170.1");
+ assert_eq!(list.0[list.0.len() - 1].to_string(), "v0.172.11");
+ }
+
+ #[test]
+ fn find_previous_minor_for_173_returns_172() {
+ let input = indoc! {"
+ v0.170.1
+ v0.170.2
+ v0.171.3
+ v0.171.4
+ v0.172.0
+ v0.172.8
+ v0.172.9
+ v0.172.11
+ "};
+ let list = VersionTagList::from_str(input).unwrap().sorted();
+ let target = VersionTag::parse("v0.173.0").unwrap();
+ let previous = list.find_previous_minor_version(&target).unwrap();
+ assert_eq!(previous.version().major, 0);
+ assert_eq!(previous.version().minor, 172);
+ assert_eq!(previous.version().patch, 0);
+ }
+
+ #[test]
+ fn find_previous_minor_skips_same_minor() {
+ let input = indoc! {"
+ v0.172.8
+ v0.172.9
+ v0.172.11
+ "};
+ let list = VersionTagList::from_str(input).unwrap().sorted();
+ let target = VersionTag::parse("v0.172.8").unwrap();
+ assert!(list.find_previous_minor_version(&target).is_none());
+ }
+
+ #[test]
+ fn find_previous_minor_with_major_version_gap() {
+ let input = indoc! {"
+ v0.172.0
+ v0.172.9
+ v0.172.11
+ "};
+ let list = VersionTagList::from_str(input).unwrap().sorted();
+ let target = VersionTag::parse("v1.0.0").unwrap();
+ let previous = list.find_previous_minor_version(&target).unwrap();
+ assert_eq!(previous.to_string(), "v0.172.0");
+ }
+
+ #[test]
+ fn find_previous_minor_requires_zero_patch_version() {
+ let input = indoc! {"
+ v0.172.1
+ v0.172.9
+ v0.172.11
+ "};
+ let list = VersionTagList::from_str(input).unwrap().sorted();
+ let target = VersionTag::parse("v1.0.0").unwrap();
+ assert!(list.find_previous_minor_version(&target).is_none());
+ }
+
+ #[test]
+ fn parse_tag_list_from_real_tags() {
+ let input = indoc! {"
+ v0.9999-temporary
+ vConradTest
+ v0.172.8
+ "};
+ let list = VersionTagList::from_str(input).unwrap();
+ assert_eq!(list.0.len(), 1);
+ assert_eq!(list.0[0].to_string(), "v0.172.8");
+ }
+
+ #[test]
+ fn parse_empty_tag_list_fails() {
+ let result = VersionTagList::from_str("");
+ assert!(result.is_err());
+ }
+
+ #[test]
+ fn pr_number_from_squash_merge_title() {
+ let line = format!(
+ "abc123{d}Author Name{d}author@email.com{d}Add cool feature (#12345)",
+ d = CommitDetails::FIELD_DELIMITER
+ );
+ let commit = CommitDetails::parse(&line, "").unwrap();
+ assert_eq!(commit.pr_number(), Some(12345));
+ }
+
+ #[test]
+ fn pr_number_missing() {
+ let line = format!(
+ "abc123{d}Author Name{d}author@email.com{d}Some commit without PR ref",
+ d = CommitDetails::FIELD_DELIMITER
+ );
+ let commit = CommitDetails::parse(&line, "").unwrap();
+ assert_eq!(commit.pr_number(), None);
+ }
+
+ #[test]
+ fn pr_number_takes_last_match() {
+ let line = format!(
+ "abc123{d}Author Name{d}author@email.com{d}Fix (#123) and refactor (#456)",
+ d = CommitDetails::FIELD_DELIMITER
+ );
+ let commit = CommitDetails::parse(&line, "").unwrap();
+ assert_eq!(commit.pr_number(), Some(456));
+ }
+
+ #[test]
+ fn co_authors_parsed_from_body() {
+ let line = format!(
+ "abc123{d}Author Name{d}author@email.com{d}Some title",
+ d = CommitDetails::FIELD_DELIMITER
+ );
+ let body = indoc! {"
+ Co-authored-by: Alice Smith <alice@example.com>
+ Co-authored-by: Bob Jones <bob@example.com>
+ "};
+ let commit = CommitDetails::parse(&line, body).unwrap();
+ let co_authors = commit.co_authors().unwrap();
+ assert_eq!(co_authors.len(), 2);
+ assert_eq!(
+ co_authors[0],
+ Committer::new("Alice Smith", "alice@example.com")
+ );
+ assert_eq!(
+ co_authors[1],
+ Committer::new("Bob Jones", "bob@example.com")
+ );
+ }
+
+ #[test]
+ fn no_co_authors_returns_none() {
+ let line = format!(
+ "abc123{d}Author Name{d}author@email.com{d}Some title",
+ d = CommitDetails::FIELD_DELIMITER
+ );
+ let commit = CommitDetails::parse(&line, "").unwrap();
+ assert!(commit.co_authors().is_none());
+ }
+
+ #[test]
+ fn commit_sha_short_returns_first_8_chars() {
+ let sha = CommitSha("abcdef1234567890abcdef1234567890abcdef12".into());
+ assert_eq!(sha.short(), "abcdef12");
+ }
+
+ #[test]
+ fn parse_commit_list_from_git_log_format() {
+ let fd = CommitDetails::FIELD_DELIMITER;
+ let bd = CommitDetails::BODY_DELIMITER;
+ let cd = CommitDetails::COMMIT_DELIMITER;
+
+ let input = format!(
+ "sha111{fd}Alice{fd}alice@test.com{fd}First commit (#100){bd}First body{cd}sha222{fd}Bob{fd}bob@test.com{fd}Second commit (#200){bd}Second body{cd}"
+ );
+
+ let list = CommitList::from_str(&input).unwrap();
+ assert_eq!(list.0.len(), 2);
+
+ assert_eq!(list.0[0].sha().0, "sha111");
+ assert_eq!(
+ list.0[0].author(),
+ &Committer::new("Alice", "alice@test.com")
+ );
+ assert_eq!(list.0[0].title(), "First commit (#100)");
+ assert_eq!(list.0[0].pr_number(), Some(100));
+ assert_eq!(list.0[0].body, "First body");
+
+ assert_eq!(list.0[1].sha().0, "sha222");
+ assert_eq!(list.0[1].author(), &Committer::new("Bob", "bob@test.com"));
+ assert_eq!(list.0[1].title(), "Second commit (#200)");
+ assert_eq!(list.0[1].pr_number(), Some(200));
+ assert_eq!(list.0[1].body, "Second body");
+ }
+}
@@ -0,0 +1,424 @@
+use std::{collections::HashMap, fmt, ops::Not, rc::Rc};
+
+use anyhow::Result;
+use derive_more::Deref;
+use serde::Deserialize;
+
+use crate::git::CommitSha;
+
+pub const PR_REVIEW_LABEL: &str = "PR state:needs review";
+
+#[derive(Debug, Clone)]
+pub struct GitHubUser {
+ pub login: String,
+}
+
+#[derive(Debug, Clone)]
+pub struct PullRequestData {
+ pub number: u64,
+ pub user: Option<GitHubUser>,
+ pub merged_by: Option<GitHubUser>,
+}
+
+#[derive(Debug, Clone, Copy, PartialEq, Eq)]
+pub enum ReviewState {
+ Approved,
+ Other,
+}
+
+#[derive(Debug, Clone)]
+pub struct PullRequestReview {
+ pub user: Option<GitHubUser>,
+ pub state: Option<ReviewState>,
+}
+
+#[derive(Debug, Clone)]
+pub struct PullRequestComment {
+ pub user: GitHubUser,
+ pub body: Option<String>,
+}
+
+#[derive(Debug, Deserialize, Clone, Deref, PartialEq, Eq)]
+pub struct GithubLogin {
+ login: String,
+}
+
+impl GithubLogin {
+ pub(crate) fn new(login: String) -> Self {
+ Self { login }
+ }
+}
+
+impl fmt::Display for GithubLogin {
+ fn fmt(&self, formatter: &mut fmt::Formatter<'_>) -> fmt::Result {
+ write!(formatter, "@{}", self.login)
+ }
+}
+
+#[derive(Debug, Deserialize, Clone)]
+pub struct CommitAuthor {
+ name: String,
+ email: String,
+ user: Option<GithubLogin>,
+}
+
+impl CommitAuthor {
+ pub(crate) fn user(&self) -> Option<&GithubLogin> {
+ self.user.as_ref()
+ }
+}
+
+impl PartialEq for CommitAuthor {
+ /// Two authors are equal when both have GitHub logins and the logins
+ /// match; without logins on both sides, falls back to matching on email
+ /// OR name.
+ ///
+ /// NOTE(review): the name-OR-email fallback makes this relation
+ /// non-transitive (A==B by name and B==C by email does not imply A==C).
+ /// That is tolerable for de-duplicating a single commit's author list
+ /// (its use in `CommitAuthors::co_authors`), but confirm before relying
+ /// on it elsewhere, e.g. as a `HashMap`/`HashSet` key.
+ fn eq(&self, other: &Self) -> bool {
+ self.user.as_ref().zip(other.user.as_ref()).map_or_else(
+ || self.email == other.email || self.name == other.name,
+ |(l, r)| l == r,
+ )
+ }
+}
+
+impl fmt::Display for CommitAuthor {
+ fn fmt(&self, formatter: &mut fmt::Formatter<'_>) -> fmt::Result {
+ match self.user.as_ref() {
+ Some(user) => write!(formatter, "{} ({user})", self.name),
+ None => write!(formatter, "{} ({})", self.name, self.email),
+ }
+ }
+}
+
+#[derive(Debug, Deserialize)]
+pub struct CommitAuthors {
+ #[serde(rename = "author")]
+ primary_author: CommitAuthor,
+ #[serde(rename = "authors")]
+ co_authors: Vec<CommitAuthor>,
+}
+
+impl CommitAuthors {
+ pub fn co_authors(&self) -> Option<impl Iterator<Item = &CommitAuthor>> {
+ self.co_authors.is_empty().not().then(|| {
+ self.co_authors
+ .iter()
+ .filter(|co_author| *co_author != &self.primary_author)
+ })
+ }
+}
+
+#[derive(Debug, Deserialize, Deref)]
+pub struct AuthorsForCommits(HashMap<CommitSha, CommitAuthors>);
+
+#[async_trait::async_trait(?Send)]
+pub trait GitHubApiClient {
+ async fn get_pull_request(&self, pr_number: u64) -> Result<PullRequestData>;
+ async fn get_pull_request_reviews(&self, pr_number: u64) -> Result<Vec<PullRequestReview>>;
+ async fn get_pull_request_comments(&self, pr_number: u64) -> Result<Vec<PullRequestComment>>;
+ async fn get_commit_authors(&self, commit_shas: &[&CommitSha]) -> Result<AuthorsForCommits>;
+ async fn check_org_membership(&self, login: &GithubLogin) -> Result<bool>;
+ async fn ensure_pull_request_has_label(&self, label: &str, pr_number: u64) -> Result<()>;
+}
+
+pub struct GitHubClient {
+ api: Rc<dyn GitHubApiClient>,
+}
+
+impl GitHubClient {
+ pub fn new(api: Rc<dyn GitHubApiClient>) -> Self {
+ Self { api }
+ }
+
+ #[cfg(feature = "octo-client")]
+ pub async fn for_app(app_id: u64, app_private_key: &str) -> Result<Self> {
+ let client = OctocrabClient::new(app_id, app_private_key).await?;
+ Ok(Self::new(Rc::new(client)))
+ }
+
+ pub async fn get_pull_request(&self, pr_number: u64) -> Result<PullRequestData> {
+ self.api.get_pull_request(pr_number).await
+ }
+
+ pub async fn get_pull_request_reviews(&self, pr_number: u64) -> Result<Vec<PullRequestReview>> {
+ self.api.get_pull_request_reviews(pr_number).await
+ }
+
+ pub async fn get_pull_request_comments(
+ &self,
+ pr_number: u64,
+ ) -> Result<Vec<PullRequestComment>> {
+ self.api.get_pull_request_comments(pr_number).await
+ }
+
+ pub async fn get_commit_authors<'a>(
+ &self,
+ commit_shas: impl IntoIterator<Item = &'a CommitSha>,
+ ) -> Result<AuthorsForCommits> {
+ let shas: Vec<&CommitSha> = commit_shas.into_iter().collect();
+ self.api.get_commit_authors(&shas).await
+ }
+
+ pub async fn check_org_membership(&self, login: &GithubLogin) -> Result<bool> {
+ self.api.check_org_membership(login).await
+ }
+
+ pub async fn add_label_to_pull_request(&self, label: &str, pr_number: u64) -> Result<()> {
+ self.api
+ .ensure_pull_request_has_label(label, pr_number)
+ .await
+ }
+}
+
+#[cfg(feature = "octo-client")]
+mod octo_client {
+ use anyhow::{Context, Result};
+ use futures::TryStreamExt as _;
+ use itertools::Itertools;
+ use jsonwebtoken::EncodingKey;
+ use octocrab::{
+ Octocrab, Page, models::pulls::ReviewState as OctocrabReviewState,
+ service::middleware::cache::mem::InMemoryCache,
+ };
+ use serde::de::DeserializeOwned;
+ use tokio::pin;
+
+ use crate::git::CommitSha;
+
+ use super::{
+ AuthorsForCommits, GitHubApiClient, GitHubUser, GithubLogin, PullRequestComment,
+ PullRequestData, PullRequestReview, ReviewState,
+ };
+
+ const PAGE_SIZE: u8 = 100;
+ const ORG: &str = "zed-industries";
+ const REPO: &str = "zed";
+
+ pub struct OctocrabClient {
+ client: Octocrab,
+ }
+
+ impl OctocrabClient {
+ /// Builds an installation-scoped Octocrab client authenticated as the
+ /// given GitHub App, targeting the org's (`ORG`) installation.
+ pub async fn new(app_id: u64, app_private_key: &str) -> Result<Self> {
+ let octocrab = Octocrab::builder()
+ .cache(InMemoryCache::new())
+ .app(
+ app_id.into(),
+ EncodingKey::from_rsa_pem(app_private_key.as_bytes())?,
+ )
+ .build()?;
+
+ let installations = octocrab
+ .apps()
+ .installations()
+ .send()
+ .await
+ .context("Failed to fetch installations")?
+ .take_items();
+
+ let installation_id = installations
+ .into_iter()
+ .find(|installation| installation.account.login == ORG)
+ .context("Could not find Zed repository in installations")?
+ .id;
+
+ let client = octocrab.installation(installation_id)?;
+ Ok(Self { client })
+ }
+
+ /// Builds a single GraphQL query fetching author + co-author data for
+ /// all given commit SHAs at once (one aliased `object(oid:)` per SHA).
+ fn build_co_authors_query<'a>(shas: impl IntoIterator<Item = &'a CommitSha>) -> String {
+ const FRAGMENT: &str = r#"
+ ... on Commit {
+ author {
+ name
+ email
+ user { login }
+ }
+ authors(first: 10) {
+ nodes {
+ name
+ email
+ user { login }
+ }
+ }
+ }
+ "#;
+
+ let objects: String = shas
+ .into_iter()
+ .map(|commit_sha| {
+ format!(
+ "commit{sha}: object(oid: \"{sha}\") {{ {FRAGMENT} }}",
+ sha = **commit_sha
+ )
+ })
+ .join("\n");
+
+ format!("{{ repository(owner: \"{ORG}\", name: \"{REPO}\") {{ {objects} }} }}")
+ .replace("\n", "")
+ }
+
+ async fn graphql<R: octocrab::FromResponse>(
+ &self,
+ query: &serde_json::Value,
+ ) -> octocrab::Result<R> {
+ self.client.graphql(query).await
+ }
+
+ /// Collects every item from a paginated result.
+ async fn get_all<T: DeserializeOwned + 'static>(
+ &self,
+ page: Page<T>,
+ ) -> octocrab::Result<Vec<T>> {
+ self.get_filtered(page, |_| true).await
+ }
+
+ /// Collects every item across all pages that satisfies `predicate`.
+ async fn get_filtered<T: DeserializeOwned + 'static>(
+ &self,
+ page: Page<T>,
+ predicate: impl Fn(&T) -> bool,
+ ) -> octocrab::Result<Vec<T>> {
+ let stream = page.into_stream(&self.client);
+ pin!(stream);
+
+ let mut results = Vec::new();
+
+ // Drain the whole stream, skipping non-matching items. The previous
+ // `while let Some(item) = stream.try_next().await? && predicate(&item)`
+ // form was take-while, not filter: it stopped pagination at the first
+ // non-matching item, so e.g. `ensure_pull_request_has_label` could
+ // miss an existing label unless it happened to be listed first.
+ while let Some(item) = stream.try_next().await? {
+ if predicate(&item) {
+ results.push(item);
+ }
+ }
+
+ Ok(results)
+ }
+ }
+
+ #[async_trait::async_trait(?Send)]
+ impl GitHubApiClient for OctocrabClient {
+ async fn get_pull_request(&self, pr_number: u64) -> Result<PullRequestData> {
+ let pr = self.client.pulls(ORG, REPO).get(pr_number).await?;
+ Ok(PullRequestData {
+ number: pr.number,
+ user: pr.user.map(|user| GitHubUser { login: user.login }),
+ merged_by: pr.merged_by.map(|user| GitHubUser { login: user.login }),
+ })
+ }
+
+ async fn get_pull_request_reviews(&self, pr_number: u64) -> Result<Vec<PullRequestReview>> {
+ let page = self
+ .client
+ .pulls(ORG, REPO)
+ .list_reviews(pr_number)
+ .per_page(PAGE_SIZE)
+ .send()
+ .await?;
+
+ let reviews = self.get_all(page).await?;
+
+ Ok(reviews
+ .into_iter()
+ .map(|review| PullRequestReview {
+ user: review.user.map(|user| GitHubUser { login: user.login }),
+ state: review.state.map(|state| match state {
+ OctocrabReviewState::Approved => ReviewState::Approved,
+ _ => ReviewState::Other,
+ }),
+ })
+ .collect())
+ }
+
+ async fn get_pull_request_comments(
+ &self,
+ pr_number: u64,
+ ) -> Result<Vec<PullRequestComment>> {
+ let page = self
+ .client
+ .issues(ORG, REPO)
+ .list_comments(pr_number)
+ .per_page(PAGE_SIZE)
+ .send()
+ .await?;
+
+ let comments = self.get_all(page).await?;
+
+ Ok(comments
+ .into_iter()
+ .map(|comment| PullRequestComment {
+ user: GitHubUser {
+ login: comment.user.login,
+ },
+ body: comment.body,
+ })
+ .collect())
+ }
+
+ async fn get_commit_authors(
+ &self,
+ commit_shas: &[&CommitSha],
+ ) -> Result<AuthorsForCommits> {
+ let query = Self::build_co_authors_query(commit_shas.iter().copied());
+ let query = serde_json::json!({ "query": query });
+ let mut response = self.graphql::<serde_json::Value>(&query).await?;
+
+ response
+ .get_mut("data")
+ .and_then(|data| data.get_mut("repository"))
+ .and_then(|repo| repo.as_object_mut())
+ .ok_or_else(|| anyhow::anyhow!("Unexpected response format!"))
+ .and_then(|commit_data| {
+ let mut response_map = serde_json::Map::with_capacity(commit_data.len());
+
+ for (key, value) in commit_data.iter_mut() {
+ let key_without_prefix = key.strip_prefix("commit").unwrap_or(key);
+ if let Some(authors) = value.get_mut("authors") {
+ if let Some(nodes) = authors.get("nodes") {
+ *authors = nodes.clone();
+ }
+ }
+
+ response_map.insert(key_without_prefix.to_owned(), value.clone());
+ }
+
+ serde_json::from_value(serde_json::Value::Object(response_map))
+ .context("Failed to deserialize commit authors")
+ })
+ }
+
+ async fn check_org_membership(&self, login: &GithubLogin) -> Result<bool> {
+ let page = self
+ .client
+ .orgs(ORG)
+ .list_members()
+ .per_page(PAGE_SIZE)
+ .send()
+ .await?;
+
+ let members = self.get_all(page).await?;
+
+ Ok(members
+ .into_iter()
+ .any(|member| member.login == login.as_str()))
+ }
+
+ async fn ensure_pull_request_has_label(&self, label: &str, pr_number: u64) -> Result<()> {
+ if self
+ .get_filtered(
+ self.client
+ .issues(ORG, REPO)
+ .list_labels_for_issue(pr_number)
+ .per_page(PAGE_SIZE)
+ .send()
+ .await?,
+ |pr_label| pr_label.name == label,
+ )
+ .await
+ .is_ok_and(|l| l.is_empty())
+ {
+ self.client
+ .issues(ORG, REPO)
+ .add_labels(pr_number, &[label.to_owned()])
+ .await?;
+ }
+
+ Ok(())
+ }
+ }
+}
+
+#[cfg(feature = "octo-client")]
+pub use octo_client::OctocrabClient;
@@ -0,0 +1,4 @@
+pub mod checks;
+pub mod git;
+pub mod github;
+pub mod report;
@@ -0,0 +1,446 @@
+use std::{
+ fs::{self, File},
+ io::{BufWriter, Write},
+ path::Path,
+};
+
+use anyhow::Context as _;
+use derive_more::Display;
+use itertools::{Either, Itertools};
+
+use crate::{
+ checks::{ReviewFailure, ReviewResult, ReviewSuccess},
+ git::CommitDetails,
+};
+
+const PULL_REQUEST_BASE_URL: &str = "https://github.com/zed-industries/zed/pull";
+
+#[derive(Debug)]
+pub struct ReportEntry<R> {
+ pub commit: CommitDetails,
+ reason: R,
+}
+
+impl<R: ToString> ReportEntry<R> {
+ fn commit_cell(&self) -> String {
+ let title = escape_markdown_link_text(self.commit.title());
+
+ match self.commit.pr_number() {
+ Some(pr_number) => format!("[{title}]({PULL_REQUEST_BASE_URL}/{pr_number})"),
+ None => escape_markdown_table_text(self.commit.title()),
+ }
+ }
+
+ fn pull_request_cell(&self) -> String {
+ self.commit
+ .pr_number()
+ .map(|pr_number| format!("#{pr_number}"))
+ .unwrap_or_else(|| "—".to_owned())
+ }
+
+ fn author_cell(&self) -> String {
+ escape_markdown_table_text(&self.commit.author().to_string())
+ }
+
+ fn reason_cell(&self) -> String {
+ escape_markdown_table_text(&self.reason.to_string())
+ }
+}
+
+impl ReportEntry<ReviewFailure> {
+ fn issue_kind(&self) -> IssueKind {
+ match self.reason {
+ ReviewFailure::Other(_) => IssueKind::Error,
+ _ => IssueKind::NotReviewed,
+ }
+ }
+}
+
+impl ReportEntry<ReviewSuccess> {
+ fn reviewers_cell(&self) -> String {
+ match &self.reason.reviewers() {
+ Ok(reviewers) => escape_markdown_table_text(&reviewers),
+ Err(_) => "—".to_owned(),
+ }
+ }
+}
+
+#[derive(Debug, Default)]
+pub struct ReportSummary {
+ pub pull_requests: usize,
+ pub reviewed: usize,
+ pub not_reviewed: usize,
+ pub errors: usize,
+}
+
+pub enum ReportReviewSummary {
+ MissingReviews,
+ MissingReviewsWithErrors,
+ NoIssuesFound,
+}
+
+impl ReportSummary {
+ fn from_entries(entries: &[ReportEntry<ReviewResult>]) -> Self {
+ Self {
+ pull_requests: entries
+ .iter()
+ .filter_map(|entry| entry.commit.pr_number())
+ .unique()
+ .count(),
+ reviewed: entries.iter().filter(|entry| entry.reason.is_ok()).count(),
+ not_reviewed: entries
+ .iter()
+ .filter(|entry| {
+ matches!(
+ entry.reason,
+ Err(ReviewFailure::NoPullRequestFound | ReviewFailure::Unreviewed)
+ )
+ })
+ .count(),
+ errors: entries
+ .iter()
+ .filter(|entry| matches!(entry.reason, Err(ReviewFailure::Other(_))))
+ .count(),
+ }
+ }
+
+ pub fn review_summary(&self) -> ReportReviewSummary {
+ match self.not_reviewed {
+ 0 if self.errors == 0 => ReportReviewSummary::NoIssuesFound,
+ 1.. if self.errors == 0 => ReportReviewSummary::MissingReviews,
+ _ => ReportReviewSummary::MissingReviewsWithErrors,
+ }
+ }
+
+ fn has_errors(&self) -> bool {
+ self.errors > 0
+ }
+}
+
+#[derive(Clone, Copy, Debug, Display, PartialEq, Eq, PartialOrd, Ord)]
+enum IssueKind {
+ #[display("Error")]
+ Error,
+ #[display("Not reviewed")]
+ NotReviewed,
+}
+
+#[derive(Debug, Default)]
+pub struct Report {
+ entries: Vec<ReportEntry<ReviewResult>>,
+}
+
+impl Report {
+ pub fn new() -> Self {
+ Self::default()
+ }
+
+ pub fn add(&mut self, commit: CommitDetails, result: ReviewResult) {
+ self.entries.push(ReportEntry {
+ commit,
+ reason: result,
+ });
+ }
+
+ pub fn errors(&self) -> impl Iterator<Item = &ReportEntry<ReviewResult>> {
+ self.entries.iter().filter(|entry| entry.reason.is_err())
+ }
+
+ pub fn summary(&self) -> ReportSummary {
+ ReportSummary::from_entries(&self.entries)
+ }
+
+ pub fn write_markdown(self, path: impl AsRef<Path>) -> anyhow::Result<()> {
+ let path = path.as_ref();
+
+ if let Some(parent) = path
+ .parent()
+ .filter(|parent| !parent.as_os_str().is_empty())
+ {
+ fs::create_dir_all(parent).with_context(|| {
+ format!(
+ "Failed to create parent directory for markdown report at {}",
+ path.display()
+ )
+ })?;
+ }
+
+ let summary = self.summary();
+ let (successes, mut issues): (Vec<_>, Vec<_>) =
+ self.entries
+ .into_iter()
+ .partition_map(|entry| match entry.reason {
+ Ok(success) => Either::Left(ReportEntry {
+ reason: success,
+ commit: entry.commit,
+ }),
+ Err(fail) => Either::Right(ReportEntry {
+ reason: fail,
+ commit: entry.commit,
+ }),
+ });
+
+ issues.sort_by_key(|entry| entry.issue_kind());
+
+ let file = File::create(path)
+ .with_context(|| format!("Failed to create markdown report at {}", path.display()))?;
+ let mut writer = BufWriter::new(file);
+
+ writeln!(writer, "# Compliance report")?;
+ writeln!(writer)?;
+ writeln!(writer, "## Overview")?;
+ writeln!(writer)?;
+ writeln!(writer, "- PRs: {}", summary.pull_requests)?;
+ writeln!(writer, "- Reviewed: {}", summary.reviewed)?;
+ writeln!(writer, "- Not reviewed: {}", summary.not_reviewed)?;
+ if summary.has_errors() {
+ writeln!(writer, "- Errors: {}", summary.errors)?;
+ }
+ writeln!(writer)?;
+
+ write_issue_table(&mut writer, &issues, &summary)?;
+ write_success_table(&mut writer, &successes)?;
+
+ writer
+ .flush()
+ .with_context(|| format!("Failed to flush markdown report to {}", path.display()))
+ }
+}
+
+fn write_issue_table(
+ writer: &mut impl Write,
+ issues: &[ReportEntry<ReviewFailure>],
+ summary: &ReportSummary,
+) -> std::io::Result<()> {
+ if summary.has_errors() {
+ writeln!(writer, "## Errors and unreviewed commits")?;
+ } else {
+ writeln!(writer, "## Unreviewed commits")?;
+ }
+ writeln!(writer)?;
+
+ if issues.is_empty() {
+ if summary.has_errors() {
+ writeln!(writer, "No errors or unreviewed commits found.")?;
+ } else {
+ writeln!(writer, "No unreviewed commits found.")?;
+ }
+ writeln!(writer)?;
+ return Ok(());
+ }
+
+ writeln!(writer, "| Commit | PR | Author | Outcome | Reason |")?;
+ writeln!(writer, "| --- | --- | --- | --- | --- |")?;
+
+ for entry in issues {
+ let issue_kind = entry.issue_kind();
+ writeln!(
+ writer,
+ "| {} | {} | {} | {} | {} |",
+ entry.commit_cell(),
+ entry.pull_request_cell(),
+ entry.author_cell(),
+ issue_kind,
+ entry.reason_cell(),
+ )?;
+ }
+
+ writeln!(writer)?;
+ Ok(())
+}
+
+fn write_success_table(
+ writer: &mut impl Write,
+ successful_entries: &[ReportEntry<ReviewSuccess>],
+) -> std::io::Result<()> {
+ writeln!(writer, "## Successful commits")?;
+ writeln!(writer)?;
+
+ if successful_entries.is_empty() {
+ writeln!(writer, "No successful commits found.")?;
+ writeln!(writer)?;
+ return Ok(());
+ }
+
+ writeln!(writer, "| Commit | PR | Author | Reviewers | Reason |")?;
+ writeln!(writer, "| --- | --- | --- | --- | --- |")?;
+
+ for entry in successful_entries {
+ writeln!(
+ writer,
+ "| {} | {} | {} | {} | {} |",
+ entry.commit_cell(),
+ entry.pull_request_cell(),
+ entry.author_cell(),
+ entry.reviewers_cell(),
+ entry.reason_cell(),
+ )?;
+ }
+
+ writeln!(writer)?;
+ Ok(())
+}
+
+fn escape_markdown_link_text(input: &str) -> String {
+ escape_markdown_table_text(input)
+ .replace('[', r"\[")
+ .replace(']', r"\]")
+}
+
+fn escape_markdown_table_text(input: &str) -> String {
+ input
+ .replace('\\', r"\\")
+ .replace('|', r"\|")
+ .replace('\r', "")
+ .replace('\n', "<br>")
+}
+
+#[cfg(test)]
+mod tests {
+ use std::str::FromStr;
+
+ use crate::{
+ checks::{ReviewFailure, ReviewSuccess},
+ git::{CommitDetails, CommitList},
+ github::{GitHubUser, PullRequestReview, ReviewState},
+ };
+
+ use super::{Report, ReportReviewSummary};
+
+ fn make_commit(
+ sha: &str,
+ author_name: &str,
+ author_email: &str,
+ title: &str,
+ body: &str,
+ ) -> CommitDetails {
+ let formatted = format!(
+ "{sha}|field-delimiter|{author_name}|field-delimiter|{author_email}|field-delimiter|{title}|body-delimiter|{body}|commit-delimiter|"
+ );
+ CommitList::from_str(&formatted)
+ .expect("test commit should parse")
+ .into_iter()
+ .next()
+ .expect("should have one commit")
+ }
+
+ fn reviewed() -> ReviewSuccess {
+ ReviewSuccess::PullRequestReviewed(vec![PullRequestReview {
+ user: Some(GitHubUser {
+ login: "reviewer".to_owned(),
+ }),
+ state: Some(ReviewState::Approved),
+ }])
+ }
+
+ #[test]
+ fn report_summary_counts_are_accurate() {
+ let mut report = Report::new();
+
+ report.add(
+ make_commit(
+ "aaa",
+ "Alice",
+ "alice@test.com",
+ "Reviewed commit (#100)",
+ "",
+ ),
+ Ok(reviewed()),
+ );
+ report.add(
+ make_commit("bbb", "Bob", "bob@test.com", "Unreviewed commit (#200)", ""),
+ Err(ReviewFailure::Unreviewed),
+ );
+ report.add(
+ make_commit("ccc", "Carol", "carol@test.com", "No PR commit", ""),
+ Err(ReviewFailure::NoPullRequestFound),
+ );
+ report.add(
+ make_commit("ddd", "Dave", "dave@test.com", "Error commit (#300)", ""),
+ Err(ReviewFailure::Other(anyhow::anyhow!("some error"))),
+ );
+
+ let summary = report.summary();
+ assert_eq!(summary.pull_requests, 3);
+ assert_eq!(summary.reviewed, 1);
+ assert_eq!(summary.not_reviewed, 2);
+ assert_eq!(summary.errors, 1);
+ }
+
+ #[test]
+ fn report_summary_all_reviewed_is_no_issues() {
+ let mut report = Report::new();
+
+ report.add(
+ make_commit("aaa", "Alice", "alice@test.com", "First (#100)", ""),
+ Ok(reviewed()),
+ );
+ report.add(
+ make_commit("bbb", "Bob", "bob@test.com", "Second (#200)", ""),
+ Ok(reviewed()),
+ );
+
+ let summary = report.summary();
+ assert!(matches!(
+ summary.review_summary(),
+ ReportReviewSummary::NoIssuesFound
+ ));
+ }
+
+ #[test]
+ fn report_summary_missing_reviews_only() {
+ let mut report = Report::new();
+
+ report.add(
+ make_commit("aaa", "Alice", "alice@test.com", "Reviewed (#100)", ""),
+ Ok(reviewed()),
+ );
+ report.add(
+ make_commit("bbb", "Bob", "bob@test.com", "Unreviewed (#200)", ""),
+ Err(ReviewFailure::Unreviewed),
+ );
+
+ let summary = report.summary();
+ assert!(matches!(
+ summary.review_summary(),
+ ReportReviewSummary::MissingReviews
+ ));
+ }
+
+ #[test]
+ fn report_summary_errors_and_missing_reviews() {
+ let mut report = Report::new();
+
+ report.add(
+ make_commit("aaa", "Alice", "alice@test.com", "Unreviewed (#100)", ""),
+ Err(ReviewFailure::Unreviewed),
+ );
+ report.add(
+ make_commit("bbb", "Bob", "bob@test.com", "Errored (#200)", ""),
+ Err(ReviewFailure::Other(anyhow::anyhow!("check failed"))),
+ );
+
+ let summary = report.summary();
+ assert!(matches!(
+ summary.review_summary(),
+ ReportReviewSummary::MissingReviewsWithErrors
+ ));
+ }
+
+ #[test]
+ fn report_summary_deduplicates_pull_requests() {
+ let mut report = Report::new();
+
+ report.add(
+ make_commit("aaa", "Alice", "alice@test.com", "First change (#100)", ""),
+ Ok(reviewed()),
+ );
+ report.add(
+ make_commit("bbb", "Bob", "bob@test.com", "Second change (#100)", ""),
+ Ok(reviewed()),
+ );
+
+ let summary = report.summary();
+ assert_eq!(summary.pull_requests, 1);
+ }
+}
@@ -15,7 +15,8 @@ backtrace.workspace = true
cargo_metadata.workspace = true
cargo_toml.workspace = true
clap = { workspace = true, features = ["derive"] }
-toml.workspace = true
+compliance = { workspace = true, features = ["octo-client"] }
+gh-workflow.workspace = true
indoc.workspace = true
indexmap.workspace = true
itertools.workspace = true
@@ -24,5 +25,6 @@ serde.workspace = true
serde_json.workspace = true
serde_yaml = "0.9.34"
strum.workspace = true
+tokio = { workspace = true, features = ["rt", "rt-multi-thread"] }
+toml.workspace = true
toml_edit.workspace = true
-gh-workflow.workspace = true
@@ -15,6 +15,7 @@ struct Args {
enum CliCommand {
/// Runs `cargo clippy`.
Clippy(tasks::clippy::ClippyArgs),
+ Compliance(tasks::compliance::ComplianceArgs),
Licenses(tasks::licenses::LicensesArgs),
/// Checks that packages conform to a set of standards.
PackageConformity(tasks::package_conformity::PackageConformityArgs),
@@ -31,6 +32,7 @@ fn main() -> Result<()> {
match args.command {
CliCommand::Clippy(args) => tasks::clippy::run_clippy(args),
+ CliCommand::Compliance(args) => tasks::compliance::check_compliance(args),
CliCommand::Licenses(args) => tasks::licenses::run_licenses(args),
CliCommand::PackageConformity(args) => {
tasks::package_conformity::run_package_conformity(args)
@@ -1,4 +1,5 @@
pub mod clippy;
+pub mod compliance;
pub mod licenses;
pub mod package_conformity;
pub mod publish_gpui;
@@ -0,0 +1,135 @@
+use std::path::PathBuf;
+
+use anyhow::{Context, Result};
+use clap::Parser;
+
+use compliance::{
+ checks::Reporter,
+ git::{CommitsFromVersionToHead, GetVersionTags, GitCommand, VersionTag},
+ github::GitHubClient,
+ report::ReportReviewSummary,
+};
+
+#[derive(Parser)]
+pub struct ComplianceArgs {
+    #[arg(value_parser = VersionTag::parse)]
+    /// The release version tag to check compliance for
+    pub(crate) version_tag: VersionTag,
+    #[arg(long)]
+    /// The markdown file to write the compliance report to
+    report_path: PathBuf,
+    #[arg(long)]
+    /// An optional branch to use instead of the determined version branch
+    branch: Option<String>,
+}
+
+impl ComplianceArgs {
+ pub(crate) fn version_tag(&self) -> &VersionTag {
+ &self.version_tag
+ }
+
+ fn version_branch(&self) -> String {
+ self.branch.clone().unwrap_or_else(|| {
+ format!(
+ "v{major}.{minor}.x",
+ major = self.version_tag().version().major,
+ minor = self.version_tag().version().minor
+ )
+ })
+ }
+}
+
+async fn check_compliance_impl(args: ComplianceArgs) -> Result<()> {
+ let app_id = std::env::var("GITHUB_APP_ID").context("Missing GITHUB_APP_ID")?;
+ let key = std::env::var("GITHUB_APP_KEY").context("Missing GITHUB_APP_KEY")?;
+
+ let tag = args.version_tag();
+
+ let previous_version = GitCommand::run(GetVersionTags)?
+ .sorted()
+ .find_previous_minor_version(&tag)
+ .cloned()
+ .ok_or_else(|| {
+ anyhow::anyhow!(
+ "Could not find previous version for tag {tag}",
+ tag = tag.to_string()
+ )
+ })?;
+
+ println!(
+ "Checking compliance for version {} with version {} as base",
+ tag.version(),
+ previous_version.version()
+ );
+
+ let commits = GitCommand::run(CommitsFromVersionToHead::new(
+ previous_version,
+ args.version_branch(),
+ ))?;
+
+ let Some(range) = commits.range() else {
+ anyhow::bail!("No commits found to check");
+ };
+
+ println!("Checking commit range {range}, {} total", commits.len());
+
+ let client = GitHubClient::for_app(
+ app_id.parse().context("Failed to parse app ID as int")?,
+ key.as_ref(),
+ )
+ .await?;
+
+ println!("Initialized GitHub client for app ID {app_id}");
+
+ let report = Reporter::new(commits, &client).generate_report().await?;
+
+ println!(
+ "Generated report for version {}",
+ args.version_tag().to_string()
+ );
+
+ let summary = report.summary();
+
+ println!(
+ "Applying compliance labels to {} pull requests",
+ summary.pull_requests
+ );
+
+ for report in report.errors() {
+ if let Some(pr_number) = report.commit.pr_number() {
+ println!("Adding review label to PR {}...", pr_number);
+
+ client
+ .add_label_to_pull_request(compliance::github::PR_REVIEW_LABEL, pr_number)
+ .await?;
+ }
+ }
+
+ let report_path = args.report_path.with_extension("md");
+
+ report.write_markdown(&report_path)?;
+
+ println!("Wrote compliance report to {}", report_path.display());
+
+ match summary.review_summary() {
+ ReportReviewSummary::MissingReviews => Err(anyhow::anyhow!(
+ "Compliance check failed, found {} commits not reviewed",
+ summary.not_reviewed
+ )),
+ ReportReviewSummary::MissingReviewsWithErrors => Err(anyhow::anyhow!(
+ "Compliance check failed with {} unreviewed commits and {} other issues",
+ summary.not_reviewed,
+ summary.errors
+ )),
+ ReportReviewSummary::NoIssuesFound => {
+ println!("No issues found, compliance check passed.");
+ Ok(())
+ }
+ }
+}
+
+pub fn check_compliance(args: ComplianceArgs) -> Result<()> {
+ tokio::runtime::Runtime::new()
+ .context("Failed to create tokio runtime")
+ .and_then(|handle| handle.block_on(check_compliance_impl(args)))
+}
@@ -11,6 +11,7 @@ mod autofix_pr;
mod bump_patch_version;
mod cherry_pick;
mod compare_perf;
+mod compliance_check;
mod danger;
mod deploy_collab;
mod extension_auto_bump;
@@ -197,6 +198,7 @@ pub fn run_workflows(args: GenerateWorkflowArgs) -> Result<()> {
WorkflowFile::zed(bump_patch_version::bump_patch_version),
WorkflowFile::zed(cherry_pick::cherry_pick),
WorkflowFile::zed(compare_perf::compare_perf),
+ WorkflowFile::zed(compliance_check::compliance_check),
WorkflowFile::zed(danger::danger),
WorkflowFile::zed(deploy_collab::deploy_collab),
WorkflowFile::zed(extension_bump::extension_bump),
@@ -2,7 +2,7 @@ use gh_workflow::*;
use crate::tasks::workflows::{
runners,
- steps::{self, FluentBuilder, NamedJob, named},
+ steps::{self, FluentBuilder, NamedJob, RepositoryTarget, TokenPermissions, named},
vars::{self, StepOutput, WorkflowInput},
};
@@ -161,7 +161,13 @@ fn commit_changes(pr_number: &WorkflowInput, autofix_job: &NamedJob) -> NamedJob
.add_env(("GITHUB_TOKEN", token))
}
- let (authenticate, token) = steps::authenticate_as_zippy();
+ let (authenticate, token) = steps::authenticate_as_zippy()
+ .for_repository(RepositoryTarget::current())
+ .with_permissions([
+ (TokenPermissions::Contents, Level::Write),
+ (TokenPermissions::Workflows, Level::Write),
+ ])
+ .into();
named::job(
Job::default()
@@ -63,7 +63,7 @@ fn run_bump_patch_version(branch: &WorkflowInput) -> steps::NamedJob {
.add_env(("GITHUB_TOKEN", token))
}
- let (authenticate, token) = steps::authenticate_as_zippy();
+ let (authenticate, token) = steps::authenticate_as_zippy().into();
named::job(
Job::default()
@@ -2,7 +2,7 @@ use gh_workflow::*;
use crate::tasks::workflows::{
runners,
- steps::{self, NamedJob, named},
+ steps::{self, NamedJob, RepositoryTarget, TokenPermissions, named},
vars::{StepOutput, WorkflowInput},
};
@@ -44,7 +44,14 @@ fn run_cherry_pick(
.add_env(("GITHUB_TOKEN", token))
}
- let (authenticate, token) = steps::authenticate_as_zippy();
+ let (authenticate, token) = steps::authenticate_as_zippy()
+ .for_repository(RepositoryTarget::current())
+ .with_permissions([
+ (TokenPermissions::Contents, Level::Write),
+ (TokenPermissions::Workflows, Level::Write),
+ (TokenPermissions::PullRequests, Level::Write),
+ ])
+ .into();
named::job(
Job::default()
@@ -0,0 +1,66 @@
+use gh_workflow::{Event, Expression, Job, Run, Schedule, Step, Workflow};
+
+use crate::tasks::workflows::{
+ runners,
+ steps::{self, CommonJobConditions, named},
+ vars::{self, StepOutput},
+};
+
+pub fn compliance_check() -> Workflow {
+ let check = scheduled_compliance_check();
+
+ named::workflow()
+ .on(Event::default().schedule([Schedule::new("30 17 * * 2")]))
+ .add_env(("CARGO_TERM_COLOR", "always"))
+ .add_job(check.name, check.job)
+}
+
+fn scheduled_compliance_check() -> steps::NamedJob {
+ let determine_version_step = named::bash(indoc::indoc! {r#"
+ VERSION=$(sed -n 's/^version = "\(.*\)"/\1/p' crates/zed/Cargo.toml | tr -d '[:space:]')
+ if [ -z "$VERSION" ]; then
+ echo "Could not determine version from crates/zed/Cargo.toml"
+ exit 1
+ fi
+ TAG="v${VERSION}-pre"
+ echo "Checking compliance for $TAG"
+ echo "tag=$TAG" >> "$GITHUB_OUTPUT"
+ "#})
+ .id("determine-version");
+
+ let tag_output = StepOutput::new(&determine_version_step, "tag");
+
+ fn run_compliance_check(tag: &StepOutput) -> Step<Run> {
+ named::bash(
+ r#"cargo xtask compliance "$LATEST_TAG" --branch main --report-path target/compliance-report"#,
+ )
+ .id("run-compliance-check")
+ .add_env(("LATEST_TAG", tag.to_string()))
+ .add_env(("GITHUB_APP_ID", vars::ZED_ZIPPY_APP_ID))
+ .add_env(("GITHUB_APP_KEY", vars::ZED_ZIPPY_APP_PRIVATE_KEY))
+ }
+
+ fn send_failure_slack_notification(tag: &StepOutput) -> Step<Run> {
+ named::bash(indoc::indoc! {r#"
+ MESSAGE="⚠️ Scheduled compliance check failed for upcoming preview release $LATEST_TAG: There are PRs with missing reviews."
+
+ curl -X POST -H 'Content-type: application/json' \
+ --data "$(jq -n --arg text "$MESSAGE" '{"text": $text}')" \
+ "$SLACK_WEBHOOK"
+ "#})
+ .if_condition(Expression::new("failure()"))
+ .add_env(("SLACK_WEBHOOK", vars::SLACK_WEBHOOK_WORKFLOW_FAILURES))
+ .add_env(("LATEST_TAG", tag.to_string()))
+ }
+
+ named::job(
+ Job::default()
+ .with_repository_owner_guard()
+ .runs_on(runners::LINUX_SMALL)
+ .add_step(steps::checkout_repo().with_full_history())
+ .add_step(steps::cache_rust_dependencies_namespace())
+ .add_step(determine_version_step)
+ .add_step(run_compliance_check(&tag_output))
+ .add_step(send_failure_slack_notification(&tag_output)),
+ )
+}
@@ -359,7 +359,8 @@ fn trigger_release(
let extension_registry = RepositoryTarget::new("zed-industries", &["extensions"]);
let (generate_token, generated_token) =
generate_token(&app_id.to_string(), &app_secret.to_string())
- .for_repository(extension_registry);
+ .for_repository(extension_registry)
+ .into();
let (get_extension_id, extension_id) = get_extension_id();
let (release_action, pull_request_number) = release_action(extension_id, tag, &generated_token);
@@ -6,6 +6,7 @@ use indoc::indoc;
use serde_json::json;
use crate::tasks::workflows::steps::CheckoutStep;
+use crate::tasks::workflows::steps::TokenPermissions;
use crate::tasks::workflows::steps::cache_rust_dependencies_namespace;
use crate::tasks::workflows::vars::JobOutput;
use crate::tasks::workflows::{
@@ -309,13 +310,17 @@ fn rollout_workflows_to_extension(
}
let (authenticate, token) =
- generate_token(vars::ZED_ZIPPY_APP_ID, vars::ZED_ZIPPY_APP_PRIVATE_KEY).for_repository(
- RepositoryTarget::new("zed-extensions", &["${{ matrix.repo }}"]).permissions([
- ("permission-pull-requests".to_owned(), Level::Write),
- ("permission-contents".to_owned(), Level::Write),
- ("permission-workflows".to_owned(), Level::Write),
- ]),
- );
+ generate_token(vars::ZED_ZIPPY_APP_ID, vars::ZED_ZIPPY_APP_PRIVATE_KEY)
+ .for_repository(RepositoryTarget::new(
+ "zed-extensions",
+ &["${{ matrix.repo }}"],
+ ))
+ .with_permissions([
+ (TokenPermissions::PullRequests, Level::Write),
+ (TokenPermissions::Contents, Level::Write),
+ (TokenPermissions::Workflows, Level::Write),
+ ])
+ .into();
let (calculate_short_sha, short_sha) = get_short_sha();
@@ -372,10 +377,10 @@ fn create_rollout_tag(rollout_job: &NamedJob, filter_repos_input: &WorkflowInput
}
let (authenticate, token) =
- generate_token(vars::ZED_ZIPPY_APP_ID, vars::ZED_ZIPPY_APP_PRIVATE_KEY).for_repository(
- RepositoryTarget::current()
- .permissions([("permission-contents".to_owned(), Level::Write)]),
- );
+ generate_token(vars::ZED_ZIPPY_APP_ID, vars::ZED_ZIPPY_APP_PRIVATE_KEY)
+ .for_repository(RepositoryTarget::current())
+ .with_permissions([(TokenPermissions::Contents, Level::Write)])
+ .into();
let job = Job::default()
.needs([rollout_job.name.clone()])
@@ -119,7 +119,8 @@ fn update_sha_in_extensions(publish_job: &NamedJob) -> NamedJob {
let extensions_repo = RepositoryTarget::new("zed-industries", &["extensions"]);
let (generate_token, generated_token) =
generate_token(vars::ZED_ZIPPY_APP_ID, vars::ZED_ZIPPY_APP_PRIVATE_KEY)
- .for_repository(extensions_repo);
+ .for_repository(extensions_repo)
+ .into();
fn checkout_extensions_repo(token: &StepOutput) -> Step<Use> {
named::uses(
@@ -1,11 +1,13 @@
-use gh_workflow::{Event, Expression, Push, Run, Step, Use, Workflow, ctx::Context};
+use gh_workflow::{Event, Expression, Job, Push, Run, Step, Use, Workflow, ctx::Context};
use indoc::formatdoc;
use crate::tasks::workflows::{
run_bundling::{bundle_linux, bundle_mac, bundle_windows},
run_tests,
runners::{self, Arch, Platform},
- steps::{self, FluentBuilder, NamedJob, dependant_job, named, release_job},
+ steps::{
+ self, CommonJobConditions, FluentBuilder, NamedJob, dependant_job, named, release_job,
+ },
vars::{self, StepOutput, assets},
};
@@ -22,6 +24,7 @@ pub(crate) fn release() -> Workflow {
let check_scripts = run_tests::check_scripts();
let create_draft_release = create_draft_release();
+ let compliance = compliance_check();
let bundle = ReleaseBundleJobs {
linux_aarch64: bundle_linux(
@@ -92,6 +95,7 @@ pub(crate) fn release() -> Workflow {
.add_job(windows_clippy.name, windows_clippy.job)
.add_job(check_scripts.name, check_scripts.job)
.add_job(create_draft_release.name, create_draft_release.job)
+ .add_job(compliance.name, compliance.job)
.map(|mut workflow| {
for job in bundle.into_jobs() {
workflow = workflow.add_job(job.name, job.job);
@@ -149,6 +153,59 @@ pub(crate) fn create_sentry_release() -> Step<Use> {
.add_with(("environment", "production"))
}
+fn compliance_check() -> NamedJob {
+    fn run_compliance_check() -> Step<Run> {
+        named::bash(
+            r#"cargo xtask compliance "$GITHUB_REF_NAME" --report-path "$COMPLIANCE_FILE_PATH""#,
+        )
+        .id("run-compliance-check")
+        .add_env(("GITHUB_APP_ID", vars::ZED_ZIPPY_APP_ID))
+        .add_env(("GITHUB_APP_KEY", vars::ZED_ZIPPY_APP_PRIVATE_KEY))
+    }
+
+    fn send_compliance_slack_notification() -> Step<Run> {
+        named::bash(indoc::indoc! {r#"
+            if [ "$COMPLIANCE_OUTCOME" == "success" ]; then
+                STATUS="✅ Compliance check passed for $GITHUB_REF_NAME"
+            else
+                STATUS="❌ Compliance check failed for $GITHUB_REF_NAME"
+            fi
+
+            REPORT_CONTENT=""
+            if [ -f "$COMPLIANCE_FILE_PATH" ]; then
+                REPORT_CONTENT=$(cat "$COMPLIANCE_FILE_PATH")
+            fi
+
+            MESSAGE=$(printf "%s\n\n%s" "$STATUS" "$REPORT_CONTENT")
+
+            curl -X POST -H 'Content-type: application/json' \
+                --data "$(jq -n --arg text "$MESSAGE" '{"text": $text}')" \
+                "$SLACK_WEBHOOK"
+        "#})
+        .if_condition(Expression::new("always()"))
+        .add_env(("SLACK_WEBHOOK", vars::SLACK_WEBHOOK_WORKFLOW_FAILURES))
+        .add_env((
+            "COMPLIANCE_OUTCOME",
+            "${{ steps.run-compliance-check.outcome }}",
+        ))
+    }
+
+ named::job(
+ Job::default()
+ .add_env(("COMPLIANCE_FILE_PATH", "compliance.md"))
+ .with_repository_owner_guard()
+ .runs_on(runners::LINUX_DEFAULT)
+ .add_step(
+ steps::checkout_repo()
+ .with_full_history()
+ .with_ref(Context::github().ref_()),
+ )
+ .add_step(steps::cache_rust_dependencies_namespace())
+ .add_step(run_compliance_check())
+ .add_step(send_compliance_slack_notification()),
+ )
+}
+
fn validate_release_assets(deps: &[&NamedJob]) -> NamedJob {
let expected_assets: Vec<String> = assets::all().iter().map(|a| format!("\"{a}\"")).collect();
let expected_assets_json = format!("[{}]", expected_assets.join(", "));
@@ -171,15 +228,59 @@ fn validate_release_assets(deps: &[&NamedJob]) -> NamedJob {
"#,
};
+ fn run_post_upload_compliance_check() -> Step<Run> {
+ named::bash(
+ r#"cargo xtask compliance "$GITHUB_REF_NAME" --report-path target/compliance-report"#,
+ )
+ .id("run-post-upload-compliance-check")
+ .add_env(("GITHUB_APP_ID", vars::ZED_ZIPPY_APP_ID))
+ .add_env(("GITHUB_APP_KEY", vars::ZED_ZIPPY_APP_PRIVATE_KEY))
+ }
+
+ fn send_post_upload_compliance_notification() -> Step<Run> {
+ named::bash(indoc::indoc! {r#"
+ if [ -z "$COMPLIANCE_OUTCOME" ] || [ "$COMPLIANCE_OUTCOME" == "skipped" ]; then
+ echo "Compliance check was skipped, not sending notification"
+ exit 0
+ fi
+
+ TAG="$GITHUB_REF_NAME"
+
+ if [ "$COMPLIANCE_OUTCOME" == "success" ]; then
+ MESSAGE="✅ Post-upload compliance re-check passed for $TAG"
+ else
+ MESSAGE="❌ Post-upload compliance re-check failed for $TAG"
+ fi
+
+ curl -X POST -H 'Content-type: application/json' \
+ --data "$(jq -n --arg text "$MESSAGE" '{"text": $text}')" \
+ "$SLACK_WEBHOOK"
+ "#})
+ .if_condition(Expression::new("always()"))
+ .add_env(("SLACK_WEBHOOK", vars::SLACK_WEBHOOK_WORKFLOW_FAILURES))
+ .add_env((
+ "COMPLIANCE_OUTCOME",
+ "${{ steps.run-post-upload-compliance-check.outcome }}",
+ ))
+ }
+
named::job(
- dependant_job(deps).runs_on(runners::LINUX_SMALL).add_step(
- named::bash(&validation_script).add_env(("GITHUB_TOKEN", vars::GITHUB_TOKEN)),
- ),
+ dependant_job(deps)
+ .runs_on(runners::LINUX_SMALL)
+ .add_step(named::bash(&validation_script).add_env(("GITHUB_TOKEN", vars::GITHUB_TOKEN)))
+ .add_step(
+ steps::checkout_repo()
+ .with_full_history()
+ .with_ref(Context::github().ref_()),
+ )
+ .add_step(steps::cache_rust_dependencies_namespace())
+ .add_step(run_post_upload_compliance_check())
+ .add_step(send_post_upload_compliance_notification()),
)
}
fn auto_release_preview(deps: &[&NamedJob]) -> NamedJob {
- let (authenticate, token) = steps::authenticate_as_zippy();
+ let (authenticate, token) = steps::authenticate_as_zippy().into();
named::job(
dependant_job(deps)
@@ -255,7 +356,7 @@ fn create_draft_release() -> NamedJob {
.add_step(
steps::checkout_repo()
.with_custom_fetch_depth(25)
- .with_ref("${{ github.ref }}"),
+ .with_ref(Context::github().ref_()),
)
.add_step(steps::script("script/determine-release-channel"))
.add_step(steps::script("mkdir -p target/"))
@@ -517,20 +517,50 @@ pub fn git_checkout(ref_name: &dyn std::fmt::Display) -> Step<Run> {
.add_env(("REF_NAME", ref_name.to_string()))
}
+/// Non-exhaustive list of the permissions to be set for a GitHub app token.
+///
+/// See https://github.com/actions/create-github-app-token?tab=readme-ov-file#permission-permission-name
+/// and beyond for a full list of available permissions.
+#[allow(unused)]
+pub(crate) enum TokenPermissions {
+ Contents,
+ Issues,
+ PullRequests,
+ Workflows,
+}
+
+impl TokenPermissions {
+ pub fn environment_name(&self) -> &'static str {
+ match self {
+ TokenPermissions::Contents => "permission-contents",
+ TokenPermissions::Issues => "permission-issues",
+ TokenPermissions::PullRequests => "permission-pull-requests",
+ TokenPermissions::Workflows => "permission-workflows",
+ }
+ }
+}
+
pub(crate) struct GenerateAppToken<'a> {
job_name: String,
app_id: &'a str,
app_secret: &'a str,
repository_target: Option<RepositoryTarget>,
+ permissions: Option<Vec<(TokenPermissions, Level)>>,
}
impl<'a> GenerateAppToken<'a> {
- pub fn for_repository(self, repository_target: RepositoryTarget) -> (Step<Use>, StepOutput) {
+ pub fn for_repository(self, repository_target: RepositoryTarget) -> Self {
Self {
repository_target: Some(repository_target),
..self
}
- .into()
+ }
+
+ pub fn with_permissions(self, permissions: impl Into<Vec<(TokenPermissions, Level)>>) -> Self {
+ Self {
+ permissions: Some(permissions.into()),
+ ..self
+ }
}
}
@@ -553,26 +583,24 @@ impl<'a> From<GenerateAppToken<'a>> for (Step<Use>, StepOutput) {
RepositoryTarget {
owner,
repositories,
- permissions,
}| {
input
.when_some(owner, |input, owner| input.add("owner", owner))
.when_some(repositories, |input, repositories| {
input.add("repositories", repositories)
})
- .when_some(permissions, |input, permissions| {
- permissions.into_iter().fold(
- input,
- |input, (permission, level)| {
- input.add(
- permission,
- serde_json::to_value(&level).unwrap_or_default(),
- )
- },
- )
- })
},
- ),
+ )
+ .when_some(token.permissions, |input, permissions| {
+ permissions
+ .into_iter()
+ .fold(input, |input, (permission, level)| {
+ input.add(
+ permission.environment_name(),
+ serde_json::to_value(&level).unwrap_or_default(),
+ )
+ })
+ }),
);
let generated_token = StepOutput::new(&step, "token");
@@ -583,7 +611,6 @@ impl<'a> From<GenerateAppToken<'a>> for (Step<Use>, StepOutput) {
pub(crate) struct RepositoryTarget {
owner: Option<String>,
repositories: Option<String>,
- permissions: Option<Vec<(String, Level)>>,
}
impl RepositoryTarget {
@@ -591,7 +618,6 @@ impl RepositoryTarget {
Self {
owner: Some(owner.to_string()),
repositories: Some(repositories.join("\n")),
- permissions: None,
}
}
@@ -599,14 +625,6 @@ impl RepositoryTarget {
Self {
owner: None,
repositories: None,
- permissions: None,
- }
- }
-
- pub fn permissions(self, permissions: impl Into<Vec<(String, Level)>>) -> Self {
- Self {
- permissions: Some(permissions.into()),
- ..self
}
}
}
@@ -618,8 +636,8 @@ pub(crate) fn generate_token<'a>(
generate_token_with_job_name(app_id_source, app_secret_source)
}
-pub fn authenticate_as_zippy() -> (Step<Use>, StepOutput) {
- generate_token_with_job_name(vars::ZED_ZIPPY_APP_ID, vars::ZED_ZIPPY_APP_PRIVATE_KEY).into()
+pub fn authenticate_as_zippy() -> GenerateAppToken<'static> {
+ generate_token_with_job_name(vars::ZED_ZIPPY_APP_ID, vars::ZED_ZIPPY_APP_PRIVATE_KEY)
}
fn generate_token_with_job_name<'a>(
@@ -631,5 +649,6 @@ fn generate_token_with_job_name<'a>(
app_id: app_id_source,
app_secret: app_secret_source,
repository_target: None,
+ permissions: None,
}
}