diff --git a/.github/workflows/autofix_pr.yml b/.github/workflows/autofix_pr.yml index f055c078cf4f814e342697e311ad5660f68f4624..717c5e2fa5e3c35f3ff33d176f73022e7a0c95d4 100644 --- a/.github/workflows/autofix_pr.yml +++ b/.github/workflows/autofix_pr.yml @@ -97,6 +97,8 @@ jobs: with: app-id: ${{ secrets.ZED_ZIPPY_APP_ID }} private-key: ${{ secrets.ZED_ZIPPY_APP_PRIVATE_KEY }} + permission-contents: write + permission-workflows: write - name: steps::checkout_repo uses: actions/checkout@93cb6efe18208431cddfb8368fd83d5badbf9bfd with: diff --git a/.github/workflows/cherry_pick.yml b/.github/workflows/cherry_pick.yml index 4a3bd0e643e027e7feaeac4760797e2a1fb16e11..ed0800dc5bbf1ec59182e9d24753e9b5112c4d13 100644 --- a/.github/workflows/cherry_pick.yml +++ b/.github/workflows/cherry_pick.yml @@ -35,6 +35,9 @@ jobs: with: app-id: ${{ secrets.ZED_ZIPPY_APP_ID }} private-key: ${{ secrets.ZED_ZIPPY_APP_PRIVATE_KEY }} + permission-contents: write + permission-workflows: write + permission-pull-requests: write - name: cherry_pick::run_cherry_pick::cherry_pick run: ./script/cherry-pick "$BRANCH" "$COMMIT" "$CHANNEL" env: diff --git a/.github/workflows/compliance_check.yml b/.github/workflows/compliance_check.yml new file mode 100644 index 0000000000000000000000000000000000000000..f09c460c233b04e78df01e7828b4def737dec16e --- /dev/null +++ b/.github/workflows/compliance_check.yml @@ -0,0 +1,55 @@ +# Generated from xtask::workflows::compliance_check +# Rebuild with `cargo xtask workflows`. 
+name: compliance_check +env: + CARGO_TERM_COLOR: always +on: + schedule: + - cron: 30 17 * * 2 +jobs: + scheduled_compliance_check: + if: (github.repository_owner == 'zed-industries' || github.repository_owner == 'zed-extensions') + runs-on: namespace-profile-2x4-ubuntu-2404 + steps: + - name: steps::checkout_repo + uses: actions/checkout@93cb6efe18208431cddfb8368fd83d5badbf9bfd + with: + clean: false + fetch-depth: 0 + - name: steps::cache_rust_dependencies_namespace + uses: namespacelabs/nscloud-cache-action@a90bb5d4b27522ce881c6e98eebd7d7e6d1653f9 + with: + cache: rust + path: ~/.rustup + - id: determine-version + name: compliance_check::scheduled_compliance_check + run: | + VERSION=$(sed -n 's/^version = "\(.*\)"/\1/p' crates/zed/Cargo.toml | tr -d '[:space:]') + if [ -z "$VERSION" ]; then + echo "Could not determine version from crates/zed/Cargo.toml" + exit 1 + fi + TAG="v${VERSION}-pre" + echo "Checking compliance for $TAG" + echo "tag=$TAG" >> "$GITHUB_OUTPUT" + - id: run-compliance-check + name: compliance_check::scheduled_compliance_check::run_compliance_check + run: cargo xtask compliance "$LATEST_TAG" --branch main --report-path target/compliance-report + env: + LATEST_TAG: ${{ steps.determine-version.outputs.tag }} + GITHUB_APP_ID: ${{ secrets.ZED_ZIPPY_APP_ID }} + GITHUB_APP_KEY: ${{ secrets.ZED_ZIPPY_APP_PRIVATE_KEY }} + - name: compliance_check::scheduled_compliance_check::send_failure_slack_notification + if: failure() + run: | + MESSAGE="⚠️ Scheduled compliance check failed for upcoming preview release $LATEST_TAG: There are PRs with missing reviews." 
+ + curl -X POST -H 'Content-type: application/json' \ + --data "$(jq -n --arg text "$MESSAGE" '{"text": $text}')" \ + "$SLACK_WEBHOOK" + env: + SLACK_WEBHOOK: ${{ secrets.SLACK_WEBHOOK_WORKFLOW_FAILURES }} + LATEST_TAG: ${{ steps.determine-version.outputs.tag }} +defaults: + run: + shell: bash -euxo pipefail {0} diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml index 35efafcfcd97c0139f8225ce7b15a05946c385ad..1401144ab3abda17dd4f526edd42166d37a47a49 100644 --- a/.github/workflows/release.yml +++ b/.github/workflows/release.yml @@ -293,6 +293,51 @@ jobs: env: GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} timeout-minutes: 60 + compliance_check: + if: (github.repository_owner == 'zed-industries' || github.repository_owner == 'zed-extensions') + runs-on: namespace-profile-16x32-ubuntu-2204 + env: + COMPLIANCE_FILE_PATH: compliance.md + steps: + - name: steps::checkout_repo + uses: actions/checkout@93cb6efe18208431cddfb8368fd83d5badbf9bfd + with: + clean: false + fetch-depth: 0 + ref: ${{ github.ref }} + - name: steps::cache_rust_dependencies_namespace + uses: namespacelabs/nscloud-cache-action@a90bb5d4b27522ce881c6e98eebd7d7e6d1653f9 + with: + cache: rust + path: ~/.rustup + - id: run-compliance-check + name: release::compliance_check::run_compliance_check + run: cargo xtask compliance "$GITHUB_REF_NAME" --report-path "$COMPLIANCE_FILE_PATH" + env: + GITHUB_APP_ID: ${{ secrets.ZED_ZIPPY_APP_ID }} + GITHUB_APP_KEY: ${{ secrets.ZED_ZIPPY_APP_PRIVATE_KEY }} + - name: release::compliance_check::send_compliance_slack_notification + if: always() + run: | + if [ "$COMPLIANCE_OUTCOME" == "success" ]; then + STATUS="✅ Compliance check passed for $GITHUB_REF_NAME" + else + STATUS="❌ Compliance check failed for $GITHUB_REF_NAME" + fi + + REPORT_CONTENT="" + if [ -f "$COMPLIANCE_FILE_PATH" ]; then + REPORT_CONTENT=$(cat "$COMPLIANCE_FILE_PATH") + fi + + MESSAGE=$(printf "%s\n\n%s" "$STATUS" "$REPORT_CONTENT") + + curl -X POST -H 'Content-type: application/json' \ + 
--data "$(jq -n --arg text "$MESSAGE" '{"text": $text}')" \ + "$SLACK_WEBHOOK" + env: + SLACK_WEBHOOK: ${{ secrets.SLACK_WEBHOOK_WORKFLOW_FAILURES }} + COMPLIANCE_OUTCOME: ${{ steps.run-compliance-check.outcome }} bundle_linux_aarch64: needs: - run_tests_linux @@ -613,6 +658,45 @@ jobs: echo "All expected assets are present in the release." env: GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} + - name: steps::checkout_repo + uses: actions/checkout@93cb6efe18208431cddfb8368fd83d5badbf9bfd + with: + clean: false + fetch-depth: 0 + ref: ${{ github.ref }} + - name: steps::cache_rust_dependencies_namespace + uses: namespacelabs/nscloud-cache-action@a90bb5d4b27522ce881c6e98eebd7d7e6d1653f9 + with: + cache: rust + path: ~/.rustup + - id: run-post-upload-compliance-check + name: release::validate_release_assets::run_post_upload_compliance_check + run: cargo xtask compliance "$GITHUB_REF_NAME" --report-path target/compliance-report + env: + GITHUB_APP_ID: ${{ secrets.ZED_ZIPPY_APP_ID }} + GITHUB_APP_KEY: ${{ secrets.ZED_ZIPPY_APP_PRIVATE_KEY }} + - name: release::validate_release_assets::send_post_upload_compliance_notification + if: always() + run: | + if [ -z "$COMPLIANCE_OUTCOME" ] || [ "$COMPLIANCE_OUTCOME" == "skipped" ]; then + echo "Compliance check was skipped, not sending notification" + exit 0 + fi + + TAG="$GITHUB_REF_NAME" + + if [ "$COMPLIANCE_OUTCOME" == "success" ]; then + MESSAGE="✅ Post-upload compliance re-check passed for $TAG" + else + MESSAGE="❌ Post-upload compliance re-check failed for $TAG" + fi + + curl -X POST -H 'Content-type: application/json' \ + --data "$(jq -n --arg text "$MESSAGE" '{"text": $text}')" \ + "$SLACK_WEBHOOK" + env: + SLACK_WEBHOOK: ${{ secrets.SLACK_WEBHOOK_WORKFLOW_FAILURES }} + COMPLIANCE_OUTCOME: ${{ steps.run-post-upload-compliance-check.outcome }} auto_release_preview: needs: - validate_release_assets diff --git a/.zed/tasks.json b/.zed/tasks.json index 
b6a9d9f4cd794d205d028f12bd8300e70f988f55..be2ccefedca46406713d9abf116c5efa9390fdb8 100644 --- a/.zed/tasks.json +++ b/.zed/tasks.json @@ -4,13 +4,13 @@ "command": "./script/clippy", "args": [], "allow_concurrent_runs": true, - "use_new_terminal": false + "use_new_terminal": false, }, { "label": "cargo run --profile release-fast", "command": "cargo", "args": ["run", "--profile", "release-fast"], "allow_concurrent_runs": true, - "use_new_terminal": false - } + "use_new_terminal": false, + }, ] diff --git a/Cargo.lock b/Cargo.lock index d88eff40b621a72a3216f1da56e5917706655d75..97412711a55667a4976a35313eb6c0388acc74ef 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -15,7 +15,7 @@ dependencies = [ "collections", "env_logger 0.11.8", "file_icons", - "futures 0.3.31", + "futures 0.3.32", "gpui", "image", "indoc", @@ -75,7 +75,7 @@ dependencies = [ "collections", "ctor", "fs", - "futures 0.3.31", + "futures 0.3.32", "gpui", "language", "log", @@ -100,7 +100,7 @@ dependencies = [ "editor", "extension_host", "fs", - "futures 0.3.31", + "futures 0.3.32", "gpui", "language", "project", @@ -163,7 +163,7 @@ dependencies = [ "eval_utils", "feature_flags", "fs", - "futures 0.3.31", + "futures 0.3.32", "git", "gpui", "gpui_tokio", @@ -227,7 +227,7 @@ dependencies = [ "async-broadcast", "async-trait", "derive_more", - "futures 0.3.31", + "futures 0.3.32", "log", "serde", "serde_json", @@ -260,11 +260,10 @@ dependencies = [ "chrono", "client", "collections", - "credentials_provider", "env_logger 0.11.8", "feature_flags", "fs", - "futures 0.3.31", + "futures 0.3.32", "google_ai", "gpui", "gpui_tokio", @@ -289,6 +288,7 @@ dependencies = [ "util", "uuid", "watch", + "zed_credentials_provider", ] [[package]] @@ -344,7 +344,7 @@ dependencies = [ "feature_flags", "file_icons", "fs", - "futures 0.3.31", + "futures 0.3.32", "fuzzy", "git", "gpui", @@ -629,7 +629,7 @@ version = "0.1.0" dependencies = [ "anyhow", "chrono", - "futures 0.3.31", + "futures 0.3.32", "http_client", "schemars", 
"serde", @@ -677,6 +677,15 @@ dependencies = [ "derive_arbitrary", ] +[[package]] +name = "arc-swap" +version = "1.9.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a07d1f37ff60921c83bdfc7407723bdefe89b44b98a9b772f225c8f9d67141a6" +dependencies = [ + "rustversion", +] + [[package]] name = "arg_enum_proc_macro" version = "0.3.4" @@ -750,7 +759,7 @@ name = "askpass" version = "0.1.0" dependencies = [ "anyhow", - "futures 0.3.31", + "futures 0.3.32", "gpui", "log", "net", @@ -945,7 +954,7 @@ name = "async-pipe" version = "0.1.3" source = "git+https://github.com/zed-industries/async-pipe-rs?rev=82d00a04211cf4e1236029aa03e6b6ce2a74c553#82d00a04211cf4e1236029aa03e6b6ce2a74c553" dependencies = [ - "futures 0.3.31", + "futures 0.3.32", "log", ] @@ -1183,7 +1192,7 @@ dependencies = [ "clock", "ctor", "db", - "futures 0.3.31", + "futures 0.3.32", "futures-lite 1.13.0", "gpui", "http_client", @@ -1862,7 +1871,7 @@ dependencies = [ "anyhow", "aws-sdk-bedrockruntime", "aws-smithy-types", - "futures 0.3.31", + "futures 0.3.32", "schemars", "serde", "serde_json", @@ -2151,7 +2160,7 @@ version = "0.1.0" dependencies = [ "clock", "ctor", - "futures 0.3.31", + "futures 0.3.32", "git2", "gpui", "language", @@ -2348,7 +2357,7 @@ dependencies = [ "collections", "feature_flags", "fs", - "futures 0.3.31", + "futures 0.3.32", "gpui", "gpui_tokio", "language", @@ -2530,6 +2539,16 @@ dependencies = [ "serde", ] +[[package]] +name = "cargo-platform" +version = "0.3.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "87a0c0e6148f11f01f32650a2ea02d532b2ad4e81d8bd41e6e565b5adc5e6082" +dependencies = [ + "serde", + "serde_core", +] + [[package]] name = "cargo_metadata" version = "0.19.2" @@ -2537,7 +2556,21 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "dd5eb614ed4c27c5d706420e4320fbe3216ab31fa1c33cd8246ac36dae4479ba" dependencies = [ "camino", - "cargo-platform", + "cargo-platform 0.1.9", + 
"semver", + "serde", + "serde_json", + "thiserror 2.0.17", +] + +[[package]] +name = "cargo_metadata" +version = "0.23.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ef987d17b0a113becdd19d3d0022d04d7ef41f9efe4f3fb63ac44ba61df3ade9" +dependencies = [ + "camino", + "cargo-platform 0.3.2", "semver", "serde", "serde_json", @@ -2669,7 +2702,7 @@ dependencies = [ "client", "clock", "collections", - "futures 0.3.31", + "futures 0.3.32", "gpui", "http_client", "language", @@ -2856,6 +2889,7 @@ dependencies = [ "chrono", "clock", "cloud_api_client", + "cloud_api_types", "cloud_llm_client", "collections", "credentials_provider", @@ -2863,12 +2897,13 @@ dependencies = [ "derive_more", "feature_flags", "fs", - "futures 0.3.31", + "futures 0.3.32", "gpui", "gpui_tokio", "http_client", "http_client_tls", "httparse", + "language_model", "log", "objc2-foundation", "parking_lot", @@ -2900,6 +2935,7 @@ dependencies = [ "util", "windows 0.61.3", "worktree", + "zed_credentials_provider", ] [[package]] @@ -2917,7 +2953,7 @@ version = "0.1.0" dependencies = [ "anyhow", "cloud_api_types", - "futures 0.3.31", + "futures 0.3.32", "gpui", "gpui_tokio", "http_client", @@ -3049,7 +3085,7 @@ dependencies = [ "anyhow", "edit_prediction", "edit_prediction_types", - "futures 0.3.31", + "futures 0.3.32", "gpui", "http_client", "icons", @@ -3059,6 +3095,7 @@ dependencies = [ "serde", "serde_json", "text", + "zed_credentials_provider", "zeta_prompt", ] @@ -3095,7 +3132,7 @@ dependencies = [ "extension", "file_finder", "fs", - "futures 0.3.31", + "futures 0.3.32", "git", "git_hosting_providers", "git_ui", @@ -3172,7 +3209,7 @@ dependencies = [ "collections", "db", "editor", - "futures 0.3.31", + "futures 0.3.32", "fuzzy", "gpui", "livekit_client", @@ -3280,6 +3317,25 @@ dependencies = [ "workspace", ] +[[package]] +name = "compliance" +version = "0.1.0" +dependencies = [ + "anyhow", + "async-trait", + "derive_more", + "futures 0.3.32", + "indoc", + "itertools 
0.14.0", + "jsonwebtoken", + "octocrab", + "regex", + "semver", + "serde", + "serde_json", + "tokio", +] + [[package]] name = "component" version = "0.1.0" @@ -3433,7 +3489,7 @@ dependencies = [ "async-trait", "base64 0.22.1", "collections", - "futures 0.3.31", + "futures 0.3.32", "gpui", "http_client", "log", @@ -3494,7 +3550,7 @@ dependencies = [ "edit_prediction_types", "editor", "fs", - "futures 0.3.31", + "futures 0.3.32", "gpui", "icons", "indoc", @@ -3528,7 +3584,7 @@ dependencies = [ "collections", "dirs 4.0.0", "fs", - "futures 0.3.31", + "futures 0.3.32", "gpui", "http_client", "log", @@ -3978,7 +4034,7 @@ version = "0.1.0" dependencies = [ "cfg-if", "crash-handler", - "futures 0.3.31", + "futures 0.3.32", "log", "mach2 0.5.0", "minidumper", @@ -4035,12 +4091,8 @@ name = "credentials_provider" version = "0.1.0" dependencies = [ "anyhow", - "futures 0.3.31", "gpui", - "paths", - "release_channel", "serde", - "serde_json", ] [[package]] @@ -4318,7 +4370,7 @@ dependencies = [ "collections", "dap-types", "fs", - "futures 0.3.31", + "futures 0.3.32", "gpui", "http_client", "language", @@ -4360,7 +4412,7 @@ dependencies = [ "dap", "dotenvy", "fs", - "futures 0.3.31", + "futures 0.3.32", "gpui", "http_client", "json_dotpath", @@ -4531,7 +4583,7 @@ dependencies = [ "anyhow", "dap", "editor", - "futures 0.3.31", + "futures 0.3.32", "gpui", "project", "serde_json", @@ -4558,7 +4610,7 @@ dependencies = [ "editor", "feature_flags", "file_icons", - "futures 0.3.31", + "futures 0.3.32", "fuzzy", "gpui", "hex", @@ -4613,7 +4665,7 @@ name = "deepseek" version = "0.1.0" dependencies = [ "anyhow", - "futures 0.3.31", + "futures 0.3.32", "http_client", "schemars", "serde", @@ -4733,7 +4785,7 @@ dependencies = [ "async-trait", "env_logger 0.11.8", "fs", - "futures 0.3.31", + "futures 0.3.32", "gpui", "http 1.3.1", "http_client", @@ -5115,13 +5167,14 @@ dependencies = [ "collections", "copilot", "copilot_ui", + "credentials_provider", "ctor", "db", "edit_prediction_context", 
"edit_prediction_types", "feature_flags", "fs", - "futures 0.3.31", + "futures 0.3.32", "gpui", "heapless", "indoc", @@ -5157,6 +5210,7 @@ dependencies = [ "workspace", "worktree", "zed_actions", + "zed_credentials_provider", "zeta_prompt", "zlog", "zstd", @@ -5173,6 +5227,7 @@ dependencies = [ "client", "cloud_llm_client", "collections", + "criterion", "db", "debug_adapter_extension", "dirs 4.0.0", @@ -5180,7 +5235,7 @@ dependencies = [ "extension", "flate2", "fs", - "futures 0.3.31", + "futures 0.3.32", "gaoya", "gpui", "gpui_platform", @@ -5232,7 +5287,7 @@ dependencies = [ "clock", "collections", "env_logger 0.11.8", - "futures 0.3.31", + "futures 0.3.32", "gpui", "indoc", "language", @@ -5281,7 +5336,7 @@ dependencies = [ "editor", "feature_flags", "fs", - "futures 0.3.31", + "futures 0.3.32", "gpui", "indoc", "language", @@ -5326,7 +5381,7 @@ dependencies = [ "feature_flags", "file_icons", "fs", - "futures 0.3.31", + "futures 0.3.32", "fuzzy", "git", "gpui", @@ -5582,6 +5637,13 @@ dependencies = [ "log", ] +[[package]] +name = "env_var" +version = "0.1.0" +dependencies = [ + "gpui", +] + [[package]] name = "envy" version = "0.4.2" @@ -5733,7 +5795,7 @@ dependencies = [ "extension", "feature_flags", "fs", - "futures 0.3.31", + "futures 0.3.32", "gpui", "gpui_platform", "gpui_tokio", @@ -5843,7 +5905,7 @@ dependencies = [ "collections", "dap", "fs", - "futures 0.3.31", + "futures 0.3.32", "gpui", "heck 0.5.0", "http_client", @@ -5911,7 +5973,7 @@ dependencies = [ "dap", "extension", "fs", - "futures 0.3.31", + "futures 0.3.32", "gpui", "gpui_tokio", "http_client", @@ -6119,7 +6181,7 @@ dependencies = [ "ctor", "editor", "file_icons", - "futures 0.3.31", + "futures 0.3.32", "fuzzy", "gpui", "menu", @@ -6421,7 +6483,7 @@ dependencies = [ "collections", "dunce", "fs", - "futures 0.3.31", + "futures 0.3.32", "git", "gpui", "ignore", @@ -6519,9 +6581,9 @@ checksum = "3a471a38ef8ed83cd6e40aa59c1ffe17db6855c18e3604d9c4ed8c08ebc28678" [[package]] name = "futures" 
-version = "0.3.31" +version = "0.3.32" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "65bc07b1a8bc7c85c5f2e110c476c7389b4554ba72af57d8445ea63a576b0876" +checksum = "8b147ee9d1f6d097cef9ce628cd2ee62288d963e16fb287bd9286455b241382d" dependencies = [ "futures-channel", "futures-core", @@ -6534,9 +6596,9 @@ dependencies = [ [[package]] name = "futures-channel" -version = "0.3.31" +version = "0.3.32" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2dff15bf788c671c1934e366d07e30c1814a8ef514e1af724a602e8a2fbe1b10" +checksum = "07bbe89c50d7a535e539b8c17bc0b49bdb77747034daa8087407d655f3f7cc1d" dependencies = [ "futures-core", "futures-sink", @@ -6557,15 +6619,15 @@ dependencies = [ [[package]] name = "futures-core" -version = "0.3.31" +version = "0.3.32" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "05f29059c0c2090612e8d742178b0580d2dc940c837851ad723096f87af6663e" +checksum = "7e3450815272ef58cec6d564423f6e755e25379b217b0bc688e295ba24df6b1d" [[package]] name = "futures-executor" -version = "0.3.31" +version = "0.3.32" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1e28d1d997f585e54aebc3f97d39e72338912123a67330d723fdbb564d646c9f" +checksum = "baf29c38818342a3b26b5b923639e7b1f4a61fc5e76102d4b1981c6dc7a7579d" dependencies = [ "futures-core", "futures-task", @@ -6585,9 +6647,9 @@ dependencies = [ [[package]] name = "futures-io" -version = "0.3.31" +version = "0.3.32" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9e5c1b78ca4aae1ac06c48a526a655760685149f0d465d21f37abfe57ce075c6" +checksum = "cecba35d7ad927e23624b22ad55235f2239cfa44fd10428eecbeba6d6a717718" [[package]] name = "futures-lite" @@ -6619,9 +6681,9 @@ dependencies = [ [[package]] name = "futures-macro" -version = "0.3.31" +version = "0.3.32" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"162ee34ebcb7c64a8abebc059ce0fee27c2262618d7b60ed8faf72fef13c3650" +checksum = "e835b70203e41293343137df5c0664546da5745f82ec9b84d40be8336958447b" dependencies = [ "proc-macro2", "quote", @@ -6630,21 +6692,21 @@ dependencies = [ [[package]] name = "futures-sink" -version = "0.3.31" +version = "0.3.32" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e575fab7d1e0dcb8d0c7bcf9a63ee213816ab51902e6d244a95819acacf1d4f7" +checksum = "c39754e157331b013978ec91992bde1ac089843443c49cbc7f46150b0fad0893" [[package]] name = "futures-task" -version = "0.3.31" +version = "0.3.32" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f90f7dce0722e95104fcb095585910c0977252f286e354b5e3bd38902cd99988" +checksum = "037711b3d59c33004d3856fbdc83b99d4ff37a24768fa1be9ce3538a1cde4393" [[package]] name = "futures-util" -version = "0.3.31" +version = "0.3.32" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9fa08315bb612088cc391249efdc3bc77536f16c91f6cf495e6fbe85b20a4a81" +checksum = "389ca41296e6190b48053de0321d02a77f32f8a5d2461dd38762c0593805c6d6" dependencies = [ "futures 0.1.31", "futures-channel", @@ -6653,9 +6715,9 @@ dependencies = [ "futures-macro", "futures-sink", "futures-task", + "libc", "memchr", "pin-project-lite", - "pin-utils", "slab", "tokio-io", ] @@ -7082,7 +7144,7 @@ dependencies = [ "async-trait", "collections", "derive_more", - "futures 0.3.31", + "futures 0.3.32", "git2", "gpui", "http_client", @@ -7131,7 +7193,6 @@ dependencies = [ "collections", "db", "editor", - "feature_flags", "fs", "git", "git_ui", @@ -7159,7 +7220,7 @@ version = "0.1.0" dependencies = [ "anyhow", "async-trait", - "futures 0.3.31", + "futures 0.3.32", "git", "gpui", "http_client", @@ -7189,9 +7250,8 @@ dependencies = [ "ctor", "db", "editor", - "feature_flags", "file_icons", - "futures 0.3.31", + "futures 0.3.32", "fuzzy", "git", "gpui", @@ -7396,7 +7456,7 @@ name = "google_ai" version = "0.1.0" dependencies = [ 
"anyhow", - "futures 0.3.31", + "futures 0.3.32", "http_client", "schemars", "serde", @@ -7466,7 +7526,7 @@ dependencies = [ "env_logger 0.11.8", "etagere", "foreign-types 0.5.0", - "futures 0.3.31", + "futures 0.3.32", "futures-concurrency", "getrandom 0.3.4", "gpui_macros", @@ -7541,7 +7601,7 @@ dependencies = [ "calloop-wayland-source", "collections", "filedescriptor", - "futures 0.3.31", + "futures 0.3.32", "gpui", "gpui_wgpu", "http_client", @@ -7595,7 +7655,7 @@ dependencies = [ "dispatch2", "etagere", "foreign-types 0.5.0", - "futures 0.3.31", + "futures 0.3.32", "gpui", "image", "itertools 0.14.0", @@ -7664,7 +7724,7 @@ version = "0.1.0" dependencies = [ "anyhow", "console_error_panic_hook", - "futures 0.3.31", + "futures 0.3.32", "gpui", "gpui_wgpu", "http_client", @@ -7715,7 +7775,7 @@ dependencies = [ "anyhow", "collections", "etagere", - "futures 0.3.31", + "futures 0.3.32", "gpui", "image", "itertools 0.14.0", @@ -8199,7 +8259,7 @@ dependencies = [ "async-tar", "bytes 1.11.1", "derive_more", - "futures 0.3.31", + "futures 0.3.32", "http 1.3.1", "http-body 1.0.1", "log", @@ -8316,6 +8376,7 @@ dependencies = [ "http 1.3.1", "hyper 1.7.0", "hyper-util", + "log", "rustls 0.23.33", "rustls-native-certs 0.8.2", "rustls-pki-types", @@ -8324,6 +8385,19 @@ dependencies = [ "tower-service", ] +[[package]] +name = "hyper-timeout" +version = "0.5.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2b90d566bffbce6a75bd8b09a05aa8c2cb1fabb6cb348f8840c9e4c90a0d83b0" +dependencies = [ + "hyper 1.7.0", + "hyper-util", + "pin-project-lite", + "tokio", + "tower-service", +] + [[package]] name = "hyper-tls" version = "0.5.0" @@ -9082,7 +9156,7 @@ dependencies = [ "async-trait", "bytes 1.11.1", "chrono", - "futures 0.3.31", + "futures 0.3.32", "serde", "serde_json", "thiserror 2.0.17", @@ -9098,7 +9172,7 @@ dependencies = [ "anyhow", "async-trait", "async-tungstenite", - "futures 0.3.31", + "futures 0.3.32", "jupyter-protocol", "serde", 
"serde_json", @@ -9216,7 +9290,7 @@ dependencies = [ "ec4rs", "encoding_rs", "fs", - "futures 0.3.31", + "futures 0.3.32", "fuzzy", "globset", "gpui", @@ -9296,7 +9370,7 @@ dependencies = [ "collections", "extension", "fs", - "futures 0.3.31", + "futures 0.3.32", "gpui", "language", "log", @@ -9316,13 +9390,13 @@ dependencies = [ "anthropic", "anyhow", "base64 0.22.1", - "client", "cloud_api_client", "cloud_api_types", "cloud_llm_client", "collections", "credentials_provider", - "futures 0.3.31", + "env_var", + "futures 0.3.32", "gpui", "http_client", "icons", @@ -9337,7 +9411,6 @@ dependencies = [ "smol", "thiserror 2.0.17", "util", - "zed_env_vars", ] [[package]] @@ -9366,7 +9439,7 @@ dependencies = [ "extension", "extension_host", "fs", - "futures 0.3.31", + "futures 0.3.32", "google_ai", "gpui", "gpui_tokio", @@ -9443,7 +9516,7 @@ dependencies = [ "command_palette_hooks", "edit_prediction", "editor", - "futures 0.3.31", + "futures 0.3.32", "gpui", "itertools 0.14.0", "language", @@ -9479,7 +9552,7 @@ dependencies = [ "chrono", "collections", "fs", - "futures 0.3.31", + "futures 0.3.32", "globset", "gpui", "grammars", @@ -9866,7 +9939,7 @@ dependencies = [ "core-video", "coreaudio-rs 0.12.1", "cpal", - "futures 0.3.31", + "futures 0.3.32", "gpui", "gpui_platform", "gpui_tokio", @@ -9910,7 +9983,7 @@ name = "lmstudio" version = "0.1.0" dependencies = [ "anyhow", - "futures 0.3.31", + "futures 0.3.32", "http_client", "schemars", "serde", @@ -9981,7 +10054,7 @@ dependencies = [ "async-pipe", "collections", "ctor", - "futures 0.3.31", + "futures 0.3.32", "gpui", "gpui_util", "log", @@ -10001,7 +10074,7 @@ dependencies = [ [[package]] name = "lsp-types" version = "0.95.1" -source = "git+https://github.com/zed-industries/lsp-types?rev=a4f410987660bf560d1e617cb78117c6b6b9f599#a4f410987660bf560d1e617cb78117c6b6b9f599" +source = "git+https://github.com/zed-industries/lsp-types?rev=c7396459fefc7886b4adfa3b596832405ae1e880#c7396459fefc7886b4adfa3b596832405ae1e880" 
dependencies = [ "bitflags 1.3.2", "serde", @@ -10121,7 +10194,7 @@ dependencies = [ "collections", "env_logger 0.11.8", "fs", - "futures 0.3.31", + "futures 0.3.32", "gpui", "gpui_platform", "html5ever 0.27.0", @@ -10152,6 +10225,7 @@ dependencies = [ "language", "log", "markdown", + "project", "settings", "tempfile", "theme_settings", @@ -10568,7 +10642,7 @@ name = "mistral" version = "0.1.0" dependencies = [ "anyhow", - "futures 0.3.31", + "futures 0.3.32", "http_client", "schemars", "serde", @@ -10757,7 +10831,7 @@ name = "nc" version = "0.1.0" dependencies = [ "anyhow", - "futures 0.3.31", + "futures 0.3.32", "net", "smol", ] @@ -10853,7 +10927,7 @@ dependencies = [ "async-std", "async-tar", "async-trait", - "futures 0.3.31", + "futures 0.3.32", "http_client", "log", "paths", @@ -11177,7 +11251,7 @@ version = "0.9.2" source = "git+https://github.com/KillTheMule/nvim-rs?rev=764dd270c642f77f10f3e19d05cc178a6cbe69f3#764dd270c642f77f10f3e19d05cc178a6cbe69f3" dependencies = [ "async-trait", - "futures 0.3.31", + "futures 0.3.32", "log", "rmp", "rmpv", @@ -11372,12 +11446,54 @@ dependencies = [ "memchr", ] +[[package]] +name = "octocrab" +version = "0.49.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "63f6687a23731011d0117f9f4c3cdabaa7b5e42ca671f42b5cc0657c492540e3" +dependencies = [ + "arc-swap", + "async-trait", + "base64 0.22.1", + "bytes 1.11.1", + "cargo_metadata 0.23.1", + "cfg-if", + "chrono", + "either", + "futures 0.3.32", + "futures-core", + "futures-util", + "getrandom 0.2.16", + "http 1.3.1", + "http-body 1.0.1", + "http-body-util", + "hyper 1.7.0", + "hyper-rustls 0.27.7", + "hyper-timeout", + "hyper-util", + "jsonwebtoken", + "once_cell", + "percent-encoding", + "pin-project", + "secrecy", + "serde", + "serde_json", + "serde_path_to_error", + "serde_urlencoded", + "snafu", + "tokio", + "tower 0.5.2", + "tower-http 0.6.6", + "url", + "web-time", +] + [[package]] name = "ollama" version = "0.1.0" dependencies = [ 
"anyhow", - "futures 0.3.31", + "futures 0.3.32", "http_client", "schemars", "serde", @@ -11484,7 +11600,7 @@ name = "open_ai" version = "0.1.0" dependencies = [ "anyhow", - "futures 0.3.31", + "futures 0.3.32", "http_client", "log", "rand 0.9.2", @@ -11502,7 +11618,7 @@ version = "0.1.0" dependencies = [ "editor", "file_icons", - "futures 0.3.31", + "futures 0.3.32", "fuzzy", "gpui", "picker", @@ -11523,7 +11639,7 @@ name = "open_router" version = "0.1.0" dependencies = [ "anyhow", - "futures 0.3.31", + "futures 0.3.32", "http_client", "schemars", "serde", @@ -11538,7 +11654,7 @@ name = "opencode" version = "0.1.0" dependencies = [ "anyhow", - "futures 0.3.31", + "futures 0.3.32", "google_ai", "http_client", "schemars", @@ -12852,7 +12968,7 @@ checksum = "af3fb618632874fb76937c2361a7f22afd393c982a2165595407edc75b06d3c1" dependencies = [ "atomic", "crossbeam-queue", - "futures 0.3.31", + "futures 0.3.32", "log", "parking_lot", "pin-project", @@ -13085,7 +13201,7 @@ dependencies = [ "extension", "fancy-regex 0.17.0", "fs", - "futures 0.3.31", + "futures 0.3.32", "fuzzy", "git", "git2", @@ -13138,6 +13254,7 @@ dependencies = [ "wax", "which 6.0.3", "worktree", + "zed_credentials_provider", "zeroize", "zlog", "ztracing", @@ -13151,7 +13268,7 @@ dependencies = [ "askpass", "clap", "client", - "futures 0.3.31", + "futures 0.3.32", "gpui", "gpui_platform", "http_client", @@ -13212,7 +13329,7 @@ version = "0.1.0" dependencies = [ "anyhow", "editor", - "futures 0.3.31", + "futures 0.3.32", "fuzzy", "gpui", "language", @@ -13254,7 +13371,7 @@ dependencies = [ "chrono", "collections", "fs", - "futures 0.3.31", + "futures 0.3.32", "fuzzy", "gpui", "handlebars 4.5.0", @@ -14009,7 +14126,7 @@ dependencies = [ "extension", "extension_host", "fs", - "futures 0.3.31", + "futures 0.3.32", "fuzzy", "gpui", "http_client", @@ -14193,7 +14310,7 @@ dependencies = [ "base64 0.22.1", "collections", "fs", - "futures 0.3.31", + "futures 0.3.32", "gpui", "log", "parking_lot", @@ -14221,7 
+14338,7 @@ dependencies = [ "anyhow", "askpass", "auto_update", - "futures 0.3.31", + "futures 0.3.32", "gpui", "log", "markdown", @@ -14259,7 +14376,7 @@ dependencies = [ "extension_host", "fork", "fs", - "futures 0.3.31", + "futures 0.3.32", "git", "git2", "git_hosting_providers", @@ -14341,7 +14458,7 @@ dependencies = [ "editor", "feature_flags", "file_icons", - "futures 0.3.31", + "futures 0.3.32", "gpui", "html_to_markdown", "http_client", @@ -14466,7 +14583,7 @@ version = "0.1.0" dependencies = [ "anyhow", "bytes 1.11.1", - "futures 0.3.31", + "futures 0.3.32", "gpui_util", "http_client", "http_client_tls", @@ -14638,7 +14755,7 @@ dependencies = [ "async-tungstenite", "base64 0.22.1", "collections", - "futures 0.3.31", + "futures 0.3.32", "gpui", "parking_lot", "proto", @@ -14731,7 +14848,7 @@ dependencies = [ "chrono", "data-encoding", "dirs 6.0.0", - "futures 0.3.31", + "futures 0.3.32", "glob", "jupyter-protocol", "serde", @@ -15105,7 +15222,7 @@ dependencies = [ "backtrace", "chrono", "flume", - "futures 0.3.31", + "futures 0.3.32", "parking_lot", "rand 0.9.2", "web-time", @@ -15333,7 +15450,7 @@ dependencies = [ "collections", "editor", "fs", - "futures 0.3.31", + "futures 0.3.32", "gpui", "itertools 0.14.0", "language", @@ -15372,6 +15489,15 @@ dependencies = [ "zeroize", ] +[[package]] +name = "secrecy" +version = "0.10.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e891af845473308773346dc847b2c23ee78fe442e0472ac50e22a18a93d3ae5a" +dependencies = [ + "zeroize", +] + [[package]] name = "security-framework" version = "2.11.1" @@ -15612,7 +15738,7 @@ dependencies = [ "collections", "ec4rs", "fs", - "futures 0.3.31", + "futures 0.3.32", "gpui", "indoc", "inventory", @@ -15716,7 +15842,7 @@ dependencies = [ "editor", "feature_flags", "fs", - "futures 0.3.31", + "futures 0.3.32", "fuzzy", "gpui", "heck 0.5.0", @@ -15747,6 +15873,7 @@ dependencies = [ "util", "workspace", "zed_actions", + "zed_credentials_provider", ] 
[[package]] @@ -15862,7 +15989,6 @@ dependencies = [ "agent_ui", "anyhow", "chrono", - "collections", "editor", "feature_flags", "fs", @@ -16076,6 +16202,27 @@ version = "0.3.5" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "0f7a918bd2a9951d18ee6e48f076843e8e73a9a5d22cf05bcd4b7a81bdd04e17" +[[package]] +name = "snafu" +version = "0.8.9" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6e84b3f4eacbf3a1ce05eac6763b4d629d60cbc94d632e4092c54ade71f1e1a2" +dependencies = [ + "snafu-derive", +] + +[[package]] +name = "snafu-derive" +version = "0.8.9" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c1c97747dbf44bb1ca44a561ece23508e99cb592e862f22222dcf42f51d1e451" +dependencies = [ + "heck 0.5.0", + "proc-macro2", + "quote", + "syn 2.0.117", +] + [[package]] name = "snippet" version = "0.1.0" @@ -16092,7 +16239,7 @@ dependencies = [ "collections", "extension", "fs", - "futures 0.3.31", + "futures 0.3.32", "gpui", "indoc", "parking_lot", @@ -16204,7 +16351,7 @@ version = "0.1.0" dependencies = [ "anyhow", "collections", - "futures 0.3.31", + "futures 0.3.32", "indoc", "libsqlite3-sys", "log", @@ -17250,7 +17397,7 @@ version = "0.1.0" dependencies = [ "anyhow", "collections", - "futures 0.3.31", + "futures 0.3.32", "gpui", "hex", "log", @@ -17297,7 +17444,7 @@ dependencies = [ name = "telemetry" version = "0.1.0" dependencies = [ - "futures 0.3.31", + "futures 0.3.32", "serde", "serde_json", "telemetry_events", @@ -17352,7 +17499,7 @@ dependencies = [ "alacritty_terminal", "anyhow", "collections", - "futures 0.3.31", + "futures 0.3.32", "gpui", "itertools 0.14.0", "libc", @@ -17398,7 +17545,7 @@ dependencies = [ "db", "dirs 4.0.0", "editor", - "futures 0.3.31", + "futures 0.3.32", "gpui", "itertools 0.14.0", "language", @@ -18034,7 +18181,7 @@ dependencies = [ "anyhow", "convert_case 0.8.0", "editor", - "futures 0.3.31", + "futures 0.3.32", "fuzzy", "gpui", "language", @@ -18080,8 
+18227,10 @@ dependencies = [ "pin-project-lite", "sync_wrapper 1.0.2", "tokio", + "tokio-util", "tower-layer", "tower-service", + "tracing", ] [[package]] @@ -18119,6 +18268,7 @@ dependencies = [ "tower 0.5.2", "tower-layer", "tower-service", + "tracing", ] [[package]] @@ -18910,7 +19060,7 @@ dependencies = [ "command-fds", "dirs 4.0.0", "dunce", - "futures 0.3.31", + "futures 0.3.32", "futures-lite 1.13.0", "git2", "globset", @@ -19067,7 +19217,7 @@ dependencies = [ "db", "editor", "env_logger 0.11.8", - "futures 0.3.31", + "futures 0.3.32", "fuzzy", "git_ui", "gpui", @@ -19431,7 +19581,7 @@ version = "0.3.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "b7516db7f32decdadb1c3b8deb1b7d78b9df7606c5cc2f6241737c2ab3a0258e" dependencies = [ - "futures 0.3.31", + "futures 0.3.32", "js-sys", "wasm-bindgen", "web-sys", @@ -19787,7 +19937,7 @@ dependencies = [ "cap-std", "cap-time-ext", "fs-set-times", - "futures 0.3.31", + "futures 0.3.32", "io-extras", "io-lifetimes", "rustix 1.1.2", @@ -19811,7 +19961,7 @@ dependencies = [ "anyhow", "async-trait", "bytes 1.11.1", - "futures 0.3.31", + "futures 0.3.32", "wasmtime", ] @@ -19829,7 +19979,7 @@ name = "watch" version = "0.1.0" dependencies = [ "ctor", - "futures 0.3.31", + "futures 0.3.32", "gpui", "parking_lot", "zlog", @@ -19965,6 +20115,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "5a6580f308b1fad9207618087a65c04e7a10bc77e02c8e84e9b00dd4b12fa0bb" dependencies = [ "js-sys", + "serde", "wasm-bindgen", ] @@ -19999,7 +20150,7 @@ dependencies = [ "client", "cloud_api_types", "cloud_llm_client", - "futures 0.3.31", + "futures 0.3.32", "gpui", "http_client", "language_model", @@ -21193,7 +21344,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "c4db52a11d4dfb0a59f194c064055794ee6564eb1ced88c25da2cf76e50c5621" dependencies = [ "bitflags 2.10.0", - "futures 0.3.31", + "futures 0.3.32", "once_cell", ] @@ -21444,7 +21595,7 @@ 
dependencies = [ "db", "feature_flags", "fs", - "futures 0.3.31", + "futures 0.3.32", "git", "gpui", "http_client", @@ -21492,7 +21643,7 @@ dependencies = [ "collections", "encoding_rs", "fs", - "futures 0.3.31", + "futures 0.3.32", "fuzzy", "git", "gpui", @@ -21702,9 +21853,10 @@ dependencies = [ "annotate-snippets", "anyhow", "backtrace", - "cargo_metadata", + "cargo_metadata 0.19.2", "cargo_toml", "clap", + "compliance", "gh-workflow", "indexmap", "indoc", @@ -21714,6 +21866,7 @@ dependencies = [ "serde_json", "serde_yaml", "strum 0.27.2", + "tokio", "toml 0.8.23", "toml_edit 0.22.27", ] @@ -21744,7 +21897,7 @@ dependencies = [ "base64 0.22.1", "bytes 1.11.1", "flate2", - "futures 0.3.31", + "futures 0.3.32", "http-body-util", "hyper 1.7.0", "hyper-util", @@ -21949,7 +22102,7 @@ dependencies = [ "feedback", "file_finder", "fs", - "futures 0.3.31", + "futures 0.3.32", "git", "git_graph", "git_hosting_providers", @@ -22181,10 +22334,24 @@ dependencies = [ ] [[package]] -name = "zed_env_vars" +name = "zed_credentials_provider" version = "0.1.0" dependencies = [ + "anyhow", + "credentials_provider", + "futures 0.3.32", "gpui", + "paths", + "release_channel", + "serde", + "serde_json", +] + +[[package]] +name = "zed_env_vars" +version = "0.1.0" +dependencies = [ + "env_var", ] [[package]] @@ -22220,7 +22387,7 @@ dependencies = [ [[package]] name = "zed_glsl" -version = "0.2.2" +version = "0.2.3" dependencies = [ "zed_extension_api 0.1.0", ] @@ -22234,7 +22401,7 @@ dependencies = [ [[package]] name = "zed_proto" -version = "0.3.1" +version = "0.3.2" dependencies = [ "zed_extension_api 0.7.0", ] @@ -22325,7 +22492,7 @@ dependencies = [ "asynchronous-codec", "bytes 1.11.1", "crossbeam-queue", - "futures 0.3.31", + "futures 0.3.32", "log", "num-traits", "once_cell", @@ -22375,6 +22542,7 @@ name = "zeta_prompt" version = "0.1.0" dependencies = [ "anyhow", + "imara-diff", "indoc", "serde", "strum 0.27.2", diff --git a/Cargo.toml b/Cargo.toml index 
3a393237ab9f5a5a8cd4b02517f6d22382ff51ff..5cb5b991b645ec1b78b16f48493c7c8dc1426344 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -61,6 +61,7 @@ members = [ "crates/edit_prediction_ui", "crates/editor", "crates/encoding_selector", + "crates/env_var", "crates/etw_tracing", "crates/eval_cli", "crates/eval_utils", @@ -220,6 +221,7 @@ members = [ "crates/x_ai", "crates/zed", "crates/zed_actions", + "crates/zed_credentials_provider", "crates/zed_env_vars", "crates/zeta_prompt", "crates/zlog", @@ -240,6 +242,7 @@ members = [ # Tooling # + "tooling/compliance", "tooling/perf", "tooling/xtask", ] @@ -287,6 +290,7 @@ collab_ui = { path = "crates/collab_ui" } collections = { path = "crates/collections", version = "0.1.0" } command_palette = { path = "crates/command_palette" } command_palette_hooks = { path = "crates/command_palette_hooks" } +compliance = { path = "tooling/compliance" } component = { path = "crates/component" } component_preview = { path = "crates/component_preview" } context_server = { path = "crates/context_server" } @@ -309,6 +313,7 @@ dev_container = { path = "crates/dev_container" } diagnostics = { path = "crates/diagnostics" } editor = { path = "crates/editor" } encoding_selector = { path = "crates/encoding_selector" } +env_var = { path = "crates/env_var" } etw_tracing = { path = "crates/etw_tracing" } eval_utils = { path = "crates/eval_utils" } extension = { path = "crates/extension" } @@ -465,6 +470,7 @@ worktree = { path = "crates/worktree" } x_ai = { path = "crates/x_ai" } zed = { path = "crates/zed" } zed_actions = { path = "crates/zed_actions" } +zed_credentials_provider = { path = "crates/zed_credentials_provider" } zed_env_vars = { path = "crates/zed_env_vars" } edit_prediction = { path = "crates/edit_prediction" } zeta_prompt = { path = "crates/zeta_prompt" } @@ -543,6 +549,7 @@ derive_more = { version = "2.1.1", features = [ "add_assign", "deref", "deref_mut", + "display", "from_str", "mul", "mul_assign", @@ -592,7 +599,7 @@ linkify = "0.10.0" 
libwebrtc = "0.3.26" livekit = { version = "0.7.32", features = ["tokio", "rustls-tls-native-roots"] } log = { version = "0.4.16", features = ["kv_unstable_serde", "serde"] } -lsp-types = { git = "https://github.com/zed-industries/lsp-types", rev = "a4f410987660bf560d1e617cb78117c6b6b9f599" } +lsp-types = { git = "https://github.com/zed-industries/lsp-types", rev = "c7396459fefc7886b4adfa3b596832405ae1e880" } mach2 = "0.5" markup5ever_rcdom = "0.3.0" metal = "0.33" diff --git a/assets/icons/diff_split.svg b/assets/icons/diff_split.svg index de2056466f7ef1081ee00dabb8b4d5baa8fc9217..dcafeb8df5c28bcac1f1fe8cf5783eebd8d8cd8a 100644 --- a/assets/icons/diff_split.svg +++ b/assets/icons/diff_split.svg @@ -1,5 +1,4 @@ - - - + + diff --git a/assets/icons/diff_split_auto.svg b/assets/icons/diff_split_auto.svg new file mode 100644 index 0000000000000000000000000000000000000000..f9dd7076be75aaf3e90286140a60deece5016114 --- /dev/null +++ b/assets/icons/diff_split_auto.svg @@ -0,0 +1,7 @@ + + + + + + + diff --git a/assets/icons/diff_unified.svg b/assets/icons/diff_unified.svg index b2d3895ae5466454e9cefc4e77e3c3f2a19cde8c..28735c16f682159b6b0a099176d6fc3b75cd248e 100644 --- a/assets/icons/diff_unified.svg +++ b/assets/icons/diff_unified.svg @@ -1,4 +1,4 @@ - - + + diff --git a/assets/keymaps/default-linux.json b/assets/keymaps/default-linux.json index 523a961d6964e2c6e08d03b75a3e1eb1890fc586..5ecca68e0404b400af2c285dc51df0a65d6fe07a 100644 --- a/assets/keymaps/default-linux.json +++ b/assets/keymaps/default-linux.json @@ -284,12 +284,36 @@ "context": "AcpThread", "bindings": { "ctrl--": "pane::GoBack", + "pageup": "agent::ScrollOutputPageUp", + "pagedown": "agent::ScrollOutputPageDown", + "home": "agent::ScrollOutputToTop", + "end": "agent::ScrollOutputToBottom", + "up": "agent::ScrollOutputLineUp", + "down": "agent::ScrollOutputLineDown", + "shift-pageup": "agent::ScrollOutputToPreviousMessage", + "shift-pagedown": "agent::ScrollOutputToNextMessage", + "ctrl-alt-pageup": 
"agent::ScrollOutputPageUp", + "ctrl-alt-pagedown": "agent::ScrollOutputPageDown", + "ctrl-alt-home": "agent::ScrollOutputToTop", + "ctrl-alt-end": "agent::ScrollOutputToBottom", + "ctrl-alt-up": "agent::ScrollOutputLineUp", + "ctrl-alt-down": "agent::ScrollOutputLineDown", + "ctrl-alt-shift-pageup": "agent::ScrollOutputToPreviousMessage", + "ctrl-alt-shift-pagedown": "agent::ScrollOutputToNextMessage", }, }, { "context": "AcpThread > Editor", "use_key_equivalents": true, "bindings": { + "ctrl-alt-pageup": "agent::ScrollOutputPageUp", + "ctrl-alt-pagedown": "agent::ScrollOutputPageDown", + "ctrl-alt-home": "agent::ScrollOutputToTop", + "ctrl-alt-end": "agent::ScrollOutputToBottom", + "ctrl-alt-up": "agent::ScrollOutputLineUp", + "ctrl-alt-down": "agent::ScrollOutputLineDown", + "ctrl-alt-shift-pageup": "agent::ScrollOutputToPreviousMessage", + "ctrl-alt-shift-pagedown": "agent::ScrollOutputToNextMessage", "ctrl-shift-r": "agent::OpenAgentDiff", "ctrl-shift-d": "git::Diff", "shift-alt-y": "agent::KeepAll", @@ -574,6 +598,7 @@ // Change the default action on `menu::Confirm` by setting the parameter // "alt-ctrl-o": ["projects::OpenRecent", { "create_new_window": true }], "alt-ctrl-o": ["projects::OpenRecent", { "create_new_window": false }], + "ctrl-r": ["projects::OpenRecent", { "create_new_window": false }], "alt-shift-open": ["projects::OpenRemote", { "from_existing_connection": false, "create_new_window": false }], // Change to open path modal for existing remote connection by setting the parameter // "alt-ctrl-shift-o": "["projects::OpenRemote", { "from_existing_connection": true }]", @@ -1123,6 +1148,8 @@ "bindings": { "ctrl-k": "recent_projects::ToggleActionsMenu", "ctrl-shift-a": "workspace::AddFolderToProject", + "shift-backspace": "recent_projects::RemoveSelected", + "ctrl-shift-enter": "recent_projects::AddToWorkspace", }, }, { @@ -1249,6 +1276,8 @@ "alt-down": "markdown::ScrollDownByItem", "ctrl-home": "markdown::ScrollToTop", "ctrl-end": 
"markdown::ScrollToBottom", + "find": "buffer_search::Deploy", + "ctrl-f": "buffer_search::Deploy", }, }, { diff --git a/assets/keymaps/default-macos.json b/assets/keymaps/default-macos.json index 9ca71aa9be3a99b1b52ab8490a6fe841956ecf50..c74b5900001a2c798076783b2741aba84ffc4b15 100644 --- a/assets/keymaps/default-macos.json +++ b/assets/keymaps/default-macos.json @@ -327,12 +327,36 @@ "context": "AcpThread", "bindings": { "ctrl--": "pane::GoBack", + "pageup": "agent::ScrollOutputPageUp", + "pagedown": "agent::ScrollOutputPageDown", + "home": "agent::ScrollOutputToTop", + "end": "agent::ScrollOutputToBottom", + "up": "agent::ScrollOutputLineUp", + "down": "agent::ScrollOutputLineDown", + "shift-pageup": "agent::ScrollOutputToPreviousMessage", + "shift-pagedown": "agent::ScrollOutputToNextMessage", + "ctrl-pageup": "agent::ScrollOutputPageUp", + "ctrl-pagedown": "agent::ScrollOutputPageDown", + "ctrl-home": "agent::ScrollOutputToTop", + "ctrl-end": "agent::ScrollOutputToBottom", + "ctrl-alt-up": "agent::ScrollOutputLineUp", + "ctrl-alt-down": "agent::ScrollOutputLineDown", + "ctrl-alt-pageup": "agent::ScrollOutputToPreviousMessage", + "ctrl-alt-pagedown": "agent::ScrollOutputToNextMessage", }, }, { "context": "AcpThread > Editor", "use_key_equivalents": true, "bindings": { + "ctrl-pageup": "agent::ScrollOutputPageUp", + "ctrl-pagedown": "agent::ScrollOutputPageDown", + "ctrl-home": "agent::ScrollOutputToTop", + "ctrl-end": "agent::ScrollOutputToBottom", + "ctrl-alt-up": "agent::ScrollOutputLineUp", + "ctrl-alt-down": "agent::ScrollOutputLineDown", + "ctrl-alt-pageup": "agent::ScrollOutputToPreviousMessage", + "ctrl-alt-pagedown": "agent::ScrollOutputToNextMessage", "shift-ctrl-r": "agent::OpenAgentDiff", "shift-ctrl-d": "git::Diff", "shift-alt-y": "agent::KeepAll", @@ -644,6 +668,7 @@ // Change the default action on `menu::Confirm` by setting the parameter // "alt-cmd-o": ["projects::OpenRecent", {"create_new_window": true }], "alt-cmd-o": ["projects::OpenRecent", { 
"create_new_window": false }], + "ctrl-r": ["projects::OpenRecent", { "create_new_window": false }], "ctrl-cmd-o": ["projects::OpenRemote", { "from_existing_connection": false, "create_new_window": false }], "ctrl-cmd-shift-o": ["projects::OpenRemote", { "from_existing_connection": true, "create_new_window": false }], "cmd-ctrl-b": "branches::OpenRecent", @@ -1188,6 +1213,8 @@ "bindings": { "cmd-k": "recent_projects::ToggleActionsMenu", "cmd-shift-a": "workspace::AddFolderToProject", + "shift-backspace": "recent_projects::RemoveSelected", + "cmd-shift-enter": "recent_projects::AddToWorkspace", }, }, { @@ -1349,6 +1376,7 @@ "alt-down": "markdown::ScrollDownByItem", "cmd-up": "markdown::ScrollToTop", "cmd-down": "markdown::ScrollToBottom", + "cmd-f": "buffer_search::Deploy", }, }, { diff --git a/assets/keymaps/default-windows.json b/assets/keymaps/default-windows.json index 1883d0df0b3ff44ad8dceefb997198cb203a9b8d..a9eb3933423ff60fe60ac391b12773ce7146fb0d 100644 --- a/assets/keymaps/default-windows.json +++ b/assets/keymaps/default-windows.json @@ -285,12 +285,36 @@ "context": "AcpThread", "bindings": { "ctrl--": "pane::GoBack", + "pageup": "agent::ScrollOutputPageUp", + "pagedown": "agent::ScrollOutputPageDown", + "home": "agent::ScrollOutputToTop", + "end": "agent::ScrollOutputToBottom", + "up": "agent::ScrollOutputLineUp", + "down": "agent::ScrollOutputLineDown", + "shift-pageup": "agent::ScrollOutputToPreviousMessage", + "shift-pagedown": "agent::ScrollOutputToNextMessage", + "ctrl-alt-pageup": "agent::ScrollOutputPageUp", + "ctrl-alt-pagedown": "agent::ScrollOutputPageDown", + "ctrl-alt-home": "agent::ScrollOutputToTop", + "ctrl-alt-end": "agent::ScrollOutputToBottom", + "ctrl-alt-up": "agent::ScrollOutputLineUp", + "ctrl-alt-down": "agent::ScrollOutputLineDown", + "ctrl-alt-shift-pageup": "agent::ScrollOutputToPreviousMessage", + "ctrl-alt-shift-pagedown": "agent::ScrollOutputToNextMessage", }, }, { "context": "AcpThread > Editor", "use_key_equivalents": true, 
"bindings": { + "ctrl-alt-pageup": "agent::ScrollOutputPageUp", + "ctrl-alt-pagedown": "agent::ScrollOutputPageDown", + "ctrl-alt-home": "agent::ScrollOutputToTop", + "ctrl-alt-end": "agent::ScrollOutputToBottom", + "ctrl-alt-up": "agent::ScrollOutputLineUp", + "ctrl-alt-down": "agent::ScrollOutputLineDown", + "ctrl-alt-shift-pageup": "agent::ScrollOutputToPreviousMessage", + "ctrl-alt-shift-pagedown": "agent::ScrollOutputToNextMessage", "ctrl-shift-r": "agent::OpenAgentDiff", "ctrl-shift-d": "git::Diff", "shift-alt-y": "agent::KeepAll", @@ -1134,6 +1158,8 @@ "bindings": { "ctrl-k": "recent_projects::ToggleActionsMenu", "ctrl-shift-a": "workspace::AddFolderToProject", + "shift-backspace": "recent_projects::RemoveSelected", + "ctrl-shift-enter": "recent_projects::AddToWorkspace", }, }, { @@ -1274,6 +1300,8 @@ "alt-down": "markdown::ScrollDownByItem", "ctrl-home": "markdown::ScrollToTop", "ctrl-end": "markdown::ScrollToBottom", + "find": "buffer_search::Deploy", + "ctrl-f": "buffer_search::Deploy", }, }, { diff --git a/assets/keymaps/vim.json b/assets/keymaps/vim.json index 1a7e7bf77248b6f863d4a6dbc1e268b4c5ae3576..220b44ff537ffa791b23c0c5b7d86b6768d74dc2 100644 --- a/assets/keymaps/vim.json +++ b/assets/keymaps/vim.json @@ -1096,6 +1096,7 @@ "ctrl-e": "markdown::ScrollDown", "g g": "markdown::ScrollToTop", "shift-g": "markdown::ScrollToBottom", + "/": "buffer_search::Deploy", }, }, { diff --git a/assets/settings/default.json b/assets/settings/default.json index 2e0ddc2da70af5516d14a2fa8418a759bec62eb1..63e906e3b11206fc458f8d7353f3ecba0abeb825 100644 --- a/assets/settings/default.json +++ b/assets/settings/default.json @@ -225,6 +225,11 @@ // 3. Hide on both typing and cursor movement: // "on_typing_and_movement" "hide_mouse": "on_typing_and_movement", + // Determines whether the focused panel follows the mouse location. + "focus_follows_mouse": { + "enabled": false, + "debounce_ms": 250, + }, // Determines how snippets are sorted relative to other completion items. 
// // 1. Place snippets at the top of the completion list: @@ -1102,11 +1107,14 @@ // "all_screens" - Show these notifications on all screens // "never" - Never show these notifications "notify_when_agent_waiting": "primary_screen", - // Whether to play a sound when the agent has either completed + // When to play a sound when the agent has either completed // its response, or needs user input. - - // Default: false - "play_sound_when_agent_done": false, + // "never" - Never play the sound + // "when_hidden" - Only play the sound when the agent panel is not visible + // "always" - Always play the sound + // + // Default: never + "play_sound_when_agent_done": "never", // Whether to have edit cards in the agent panel expanded, showing a preview of the full diff. // // Default: true @@ -1136,6 +1144,11 @@ // // Default: false "show_turn_stats": false, + // Whether to show the merge conflict indicator in the status bar + // that offers to resolve conflicts using the agent. + // + // Default: true + "show_merge_conflict_indicator": true, }, // Whether the screen sharing icon is shown in the os status bar. "show_call_status_icon": true, @@ -2404,6 +2417,7 @@ "toggle_relative_line_numbers": false, "use_system_clipboard": "always", "use_smartcase_find": false, + "use_regex_search": true, "gdefault": false, "highlight_on_yank_duration": 200, "custom_digraphs": {}, @@ -2529,21 +2543,31 @@ "format_dap_log_messages": true, "button": true, }, - // Configures any number of settings profiles that are temporarily applied on - // top of your existing user settings when selected from - // `settings profile selector: toggle`. + // Configures any number of settings profiles that are temporarily applied + // when selected from `settings profile selector: toggle`. + // + // Each profile has an optional `base` ("user" or "default") and a `settings` + // object. When `base` is "user" (the default), the profile applies on top of + // your user settings. 
When `base` is "default", user settings are ignored and + // the profile applies on top of Zed's defaults. + // // Examples: // "profiles": { // "Presenting": { - // "agent_ui_font_size": 20.0, - // "buffer_font_size": 20.0, - // "theme": "One Light", - // "ui_font_size": 20.0 + // "base": "default", + // "settings": { + // "agent_ui_font_size": 20.0, + // "buffer_font_size": 20.0, + // "theme": "One Light", + // "ui_font_size": 20.0 + // } // }, // "Python (ty)": { - // "languages": { - // "Python": { - // "language_servers": ["ty"] + // "settings": { + // "languages": { + // "Python": { + // "language_servers": ["ty"] + // } // } // } // } diff --git a/assets/settings/initial_tasks.json b/assets/settings/initial_tasks.json index 0d6f4471320e443f3c4a483f53f6901c76e7dc72..bb6c9c04ae14db8f2d01adabd8d1494caa7d7407 100644 --- a/assets/settings/initial_tasks.json +++ b/assets/settings/initial_tasks.json @@ -50,9 +50,9 @@ "show_command": true, // Which edited buffers to save before running the task: // * `all` — save all edited buffers - // * `current` — save current buffer only + // * `current` — save currently active buffer only // * `none` — don't save any buffers - "save": "all", + "save": "none", // Represents the tags for inline runnable indicators, or spawning multiple tasks at once. 
// "tags": [] }, diff --git a/assets/themes/ayu/ayu.json b/assets/themes/ayu/ayu.json index 3450e35bf62d780bdaf0cff2c6bc9f8bdfea7c1e..f27566c4f72cac3938a752c64d95d0500c595306 100644 --- a/assets/themes/ayu/ayu.json +++ b/assets/themes/ayu/ayu.json @@ -283,7 +283,7 @@ "font_weight": null }, "preproc": { - "color": "#bfbdb6ff", + "color": "#ff8f3fff", "font_style": null, "font_weight": null }, @@ -391,6 +391,16 @@ "color": "#5ac1feff", "font_style": null, "font_weight": null + }, + "diff.plus": { + "color": "#aad94cff", + "font_style": null, + "font_weight": null + }, + "diff.minus": { + "color": "#f07178ff", + "font_style": null, + "font_weight": null } } } @@ -675,7 +685,7 @@ "font_weight": null }, "preproc": { - "color": "#5c6166ff", + "color": "#fa8d3eff", "font_style": null, "font_weight": null }, @@ -783,6 +793,16 @@ "color": "#3b9ee5ff", "font_style": null, "font_weight": null + }, + "diff.plus": { + "color": "#6cbf43ff", + "font_style": null, + "font_weight": null + }, + "diff.minus": { + "color": "#ff6666ff", + "font_style": null, + "font_weight": null } } } @@ -1067,7 +1087,7 @@ "font_weight": null }, "preproc": { - "color": "#cccac2ff", + "color": "#ffad65ff", "font_style": null, "font_weight": null }, @@ -1175,6 +1195,16 @@ "color": "#72cffeff", "font_style": null, "font_weight": null + }, + "diff.plus": { + "color": "#aad94cff", + "font_style": null, + "font_weight": null + }, + "diff.minus": { + "color": "#f07178ff", + "font_style": null, + "font_weight": null } } } diff --git a/assets/themes/gruvbox/gruvbox.json b/assets/themes/gruvbox/gruvbox.json index 16ae188712f7a800ab4fb8a81a2d24cac99da56b..4330df54fccae55e7ca077c0da9a891ee71ebe3a 100644 --- a/assets/themes/gruvbox/gruvbox.json +++ b/assets/themes/gruvbox/gruvbox.json @@ -293,7 +293,7 @@ "font_weight": null }, "preproc": { - "color": "#fbf1c7ff", + "color": "#fb4833ff", "font_style": null, "font_weight": null }, @@ -406,6 +406,16 @@ "color": "#83a598ff", "font_style": null, "font_weight": null + 
}, + "diff.plus": { + "color": "#b8bb26ff", + "font_style": null, + "font_weight": null + }, + "diff.minus": { + "color": "#fb4934ff", + "font_style": null, + "font_weight": null } } } @@ -700,7 +710,7 @@ "font_weight": null }, "preproc": { - "color": "#fbf1c7ff", + "color": "#fb4833ff", "font_style": null, "font_weight": null }, @@ -813,6 +823,16 @@ "color": "#83a598ff", "font_style": null, "font_weight": null + }, + "diff.plus": { + "color": "#b8bb26ff", + "font_style": null, + "font_weight": null + }, + "diff.minus": { + "color": "#fb4934ff", + "font_style": null, + "font_weight": null } } } @@ -1107,7 +1127,7 @@ "font_weight": null }, "preproc": { - "color": "#fbf1c7ff", + "color": "#fb4833ff", "font_style": null, "font_weight": null }, @@ -1220,6 +1240,16 @@ "color": "#83a598ff", "font_style": null, "font_weight": null + }, + "diff.plus": { + "color": "#b8bb26ff", + "font_style": null, + "font_weight": null + }, + "diff.minus": { + "color": "#fb4934ff", + "font_style": null, + "font_weight": null } } } @@ -1514,7 +1544,7 @@ "font_weight": null }, "preproc": { - "color": "#282828ff", + "color": "#9d0006ff", "font_style": null, "font_weight": null }, @@ -1627,6 +1657,16 @@ "color": "#0b6678ff", "font_style": null, "font_weight": null + }, + "diff.plus": { + "color": "#79740eff", + "font_style": null, + "font_weight": null + }, + "diff.minus": { + "color": "#9d0006ff", + "font_style": null, + "font_weight": null } } } @@ -1921,7 +1961,7 @@ "font_weight": null }, "preproc": { - "color": "#282828ff", + "color": "#9d0006ff", "font_style": null, "font_weight": null }, @@ -2034,6 +2074,16 @@ "color": "#0b6678ff", "font_style": null, "font_weight": null + }, + "diff.plus": { + "color": "#79740eff", + "font_style": null, + "font_weight": null + }, + "diff.minus": { + "color": "#9d0006ff", + "font_style": null, + "font_weight": null } } } @@ -2328,7 +2378,7 @@ "font_weight": null }, "preproc": { - "color": "#282828ff", + "color": "#9d0006ff", "font_style": null, 
"font_weight": null }, @@ -2441,6 +2491,16 @@ "color": "#0b6678ff", "font_style": null, "font_weight": null + }, + "diff.plus": { + "color": "#79740eff", + "font_style": null, + "font_weight": null + }, + "diff.minus": { + "color": "#9d0006ff", + "font_style": null, + "font_weight": null } } } diff --git a/assets/themes/one/one.json b/assets/themes/one/one.json index 05af3f5cfeec7d4a24c4fe6d684fb21d04e2d81c..e60b6314b9595ac02bd6a43be4580ba9331ae769 100644 --- a/assets/themes/one/one.json +++ b/assets/themes/one/one.json @@ -290,7 +290,7 @@ "font_weight": null }, "preproc": { - "color": "#dce0e5ff", + "color": "#b477cfff", "font_style": null, "font_weight": null }, @@ -403,6 +403,16 @@ "color": "#73ade9ff", "font_style": null, "font_weight": null + }, + "diff.plus": { + "color": "#98c379ff", + "font_style": null, + "font_weight": null + }, + "diff.minus": { + "color": "#e06c75ff", + "font_style": null, + "font_weight": null } } } @@ -692,7 +702,7 @@ "font_weight": null }, "preproc": { - "color": "#242529ff", + "color": "#a449abff", "font_style": null, "font_weight": null }, @@ -805,6 +815,16 @@ "color": "#5b79e3ff", "font_style": null, "font_weight": null + }, + "diff.plus": { + "color": "#50a14fff", + "font_style": null, + "font_weight": null + }, + "diff.minus": { + "color": "#e45649ff", + "font_style": null, + "font_weight": null } } } diff --git a/crates/acp_thread/src/acp_thread.rs b/crates/acp_thread/src/acp_thread.rs index 937592b8a94df00ca1c7565d43893b99693f8892..36c9fb40c4a573e09da05618a29c1898cced60ad 100644 --- a/crates/acp_thread/src/acp_thread.rs +++ b/crates/acp_thread/src/acp_thread.rs @@ -1032,6 +1032,7 @@ pub struct AcpThread { connection: Rc, token_usage: Option, prompt_capabilities: acp::PromptCapabilities, + available_commands: Vec, _observe_prompt_capabilities: Task>, terminals: HashMap>, pending_terminal_output: HashMap>>, @@ -1220,6 +1221,7 @@ impl AcpThread { session_id, token_usage: None, prompt_capabilities, + available_commands: 
Vec::new(), _observe_prompt_capabilities: task, terminals: HashMap::default(), pending_terminal_output: HashMap::default(), @@ -1239,6 +1241,10 @@ impl AcpThread { self.prompt_capabilities.clone() } + pub fn available_commands(&self) -> &[acp::AvailableCommand] { + &self.available_commands + } + pub fn draft_prompt(&self) -> Option<&[acp::ContentBlock]> { self.draft_prompt.as_deref() } @@ -1419,7 +1425,10 @@ impl AcpThread { acp::SessionUpdate::AvailableCommandsUpdate(acp::AvailableCommandsUpdate { available_commands, .. - }) => cx.emit(AcpThreadEvent::AvailableCommandsUpdated(available_commands)), + }) => { + self.available_commands = available_commands.clone(); + cx.emit(AcpThreadEvent::AvailableCommandsUpdated(available_commands)); + } acp::SessionUpdate::CurrentModeUpdate(acp::CurrentModeUpdate { current_mode_id, .. @@ -2616,7 +2625,7 @@ impl AcpThread { text_diff(old_text.as_str(), &content) .into_iter() .map(|(range, replacement)| { - (snapshot.anchor_range_around(range), replacement) + (snapshot.anchor_range_inside(range), replacement) }) .collect::>() }) diff --git a/crates/acp_thread/src/diff.rs b/crates/acp_thread/src/diff.rs index 08b1b9bdf24d1ff9980164c1af8b3e60bd2f3339..a6d3b86db7c980bb5e4e5a8cacee95abeaabc3f1 100644 --- a/crates/acp_thread/src/diff.rs +++ b/crates/acp_thread/src/diff.rs @@ -191,7 +191,7 @@ impl Diff { } pub fn has_revealed_range(&self, cx: &App) -> bool { - self.multibuffer().read(cx).paths().next().is_some() + !self.multibuffer().read(cx).is_empty() } pub fn needs_update(&self, old_text: &str, new_text: &str, cx: &App) -> bool { diff --git a/crates/acp_thread/src/mention.rs b/crates/acp_thread/src/mention.rs index 753838d3b98ed60dc02c3d9383c28fe4f848a29e..28038ecbc04c59d1c5107872210056f11b413141 100644 --- a/crates/acp_thread/src/mention.rs +++ b/crates/acp_thread/src/mention.rs @@ -19,7 +19,9 @@ pub enum MentionUri { File { abs_path: PathBuf, }, - PastedImage, + PastedImage { + name: String, + }, Directory { abs_path: PathBuf, }, @@ 
-155,7 +157,9 @@ impl MentionUri { include_warnings, }) } else if path.starts_with("/agent/pasted-image") { - Ok(Self::PastedImage) + let name = + single_query_param(&url, "name")?.unwrap_or_else(|| "Image".to_string()); + Ok(Self::PastedImage { name }) } else if path.starts_with("/agent/untitled-buffer") { let fragment = url .fragment() @@ -227,7 +231,7 @@ impl MentionUri { .unwrap_or_default() .to_string_lossy() .into_owned(), - MentionUri::PastedImage => "Image".to_string(), + MentionUri::PastedImage { name } => name.clone(), MentionUri::Symbol { name, .. } => name.clone(), MentionUri::Thread { name, .. } => name.clone(), MentionUri::Rule { name, .. } => name.clone(), @@ -296,7 +300,7 @@ impl MentionUri { MentionUri::File { abs_path } => { FileIcons::get_icon(abs_path, cx).unwrap_or_else(|| IconName::File.path().into()) } - MentionUri::PastedImage => IconName::Image.path().into(), + MentionUri::PastedImage { .. } => IconName::Image.path().into(), MentionUri::Directory { abs_path } => FileIcons::get_folder_icon(false, abs_path, cx) .unwrap_or_else(|| IconName::Folder.path().into()), MentionUri::Symbol { .. 
} => IconName::Code.path().into(), @@ -322,10 +326,18 @@ impl MentionUri { url.set_path(&abs_path.to_string_lossy()); url } - MentionUri::PastedImage => Url::parse("zed:///agent/pasted-image").unwrap(), + MentionUri::PastedImage { name } => { + let mut url = Url::parse("zed:///agent/pasted-image").unwrap(); + url.query_pairs_mut().append_pair("name", name); + url + } MentionUri::Directory { abs_path } => { let mut url = Url::parse("file:///").unwrap(); - url.set_path(&abs_path.to_string_lossy()); + let mut path = abs_path.to_string_lossy().into_owned(); + if !path.ends_with('/') && !path.ends_with('\\') { + path.push('/'); + } + url.set_path(&path); url } MentionUri::Symbol { @@ -490,6 +502,21 @@ mod tests { assert_eq!(uri.to_uri().to_string(), expected); } + #[test] + fn test_directory_uri_round_trip_without_trailing_slash() { + let uri = MentionUri::Directory { + abs_path: PathBuf::from(path!("/path/to/dir")), + }; + let serialized = uri.to_uri().to_string(); + assert!(serialized.ends_with('/'), "directory URI must end with /"); + let parsed = MentionUri::parse(&serialized, PathStyle::local()).unwrap(); + assert!( + matches!(parsed, MentionUri::Directory { .. 
}), + "expected Directory variant, got {:?}", + parsed + ); + } + #[test] fn test_parse_symbol_uri() { let symbol_uri = uri!("file:///path/to/file.rs?symbol=MySymbol#L10:20"); diff --git a/crates/action_log/src/action_log.rs b/crates/action_log/src/action_log.rs index 3faf767c7020763eadc7db6c93af42f650a07434..1f17d38f7d2a2770350026f2f145a53723ef7481 100644 --- a/crates/action_log/src/action_log.rs +++ b/crates/action_log/src/action_log.rs @@ -738,6 +738,7 @@ impl ActionLog { let task = if let Some(existing_file_content) = existing_file_content { // Capture the agent's content before restoring existing file content let agent_content = buffer.read(cx).text(); + let buffer_id = buffer.read(cx).remote_id(); buffer.update(cx, |buffer, cx| { buffer.start_transaction(); @@ -750,7 +751,10 @@ impl ActionLog { undo_info = Some(PerBufferUndo { buffer: buffer.downgrade(), - edits_to_restore: vec![(Anchor::MIN..Anchor::MAX, agent_content)], + edits_to_restore: vec![( + Anchor::min_for_buffer(buffer_id)..Anchor::max_for_buffer(buffer_id), + agent_content, + )], status: UndoBufferStatus::Created { had_existing_content: true, }, @@ -990,8 +994,8 @@ impl ActionLog { let mut valid_edits = Vec::new(); for (anchor_range, text_to_restore) in per_buffer_undo.edits_to_restore { - if anchor_range.start.buffer_id == Some(buffer.remote_id()) - && anchor_range.end.buffer_id == Some(buffer.remote_id()) + if anchor_range.start.buffer_id == buffer.remote_id() + && anchor_range.end.buffer_id == buffer.remote_id() { valid_edits.push((anchor_range, text_to_restore)); } diff --git a/crates/agent/src/edit_agent.rs b/crates/agent/src/edit_agent.rs index 6e6cf9735a922695bf089bdcc78798fb086ad364..afaa124de066d92e5a1d1a1670f762017f086d01 100644 --- a/crates/agent/src/edit_agent.rs +++ b/crates/agent/src/edit_agent.rs @@ -374,13 +374,13 @@ impl EditAgent { buffer.edit(edits.iter().cloned(), None, cx); let max_edit_end = buffer .summaries_for_anchors::( - edits.iter().map(|(range, _)| &range.end), + 
edits.iter().map(|(range, _)| range.end), ) .max() .unwrap(); let min_edit_start = buffer .summaries_for_anchors::( - edits.iter().map(|(range, _)| &range.start), + edits.iter().map(|(range, _)| range.start), ) .min() .unwrap(); @@ -1519,7 +1519,7 @@ mod tests { stream: &mut UnboundedReceiver, ) -> Vec { let mut events = Vec::new(); - while let Ok(Some(event)) = stream.try_next() { + while let Ok(event) = stream.try_recv() { events.push(event); } events diff --git a/crates/agent/src/edit_agent/evals.rs b/crates/agent/src/edit_agent/evals.rs index e7b67e37bf4a8b71664a78b99b757c6985794ec6..ba8b7ed867ea26bcdcdee7f8bf20390c2f9592b3 100644 --- a/crates/agent/src/edit_agent/evals.rs +++ b/crates/agent/src/edit_agent/evals.rs @@ -4,7 +4,7 @@ use crate::{ ListDirectoryTool, ListDirectoryToolInput, ReadFileTool, ReadFileToolInput, }; use Role::*; -use client::{Client, UserStore}; +use client::{Client, RefreshLlmTokenListener, UserStore}; use eval_utils::{EvalOutput, EvalOutputProcessor, OutcomeKind}; use fs::FakeFs; use futures::{FutureExt, future::LocalBoxFuture}; @@ -1423,7 +1423,8 @@ impl EditAgentTest { let client = Client::production(cx); let user_store = cx.new(|cx| UserStore::new(client.clone(), cx)); settings::init(cx); - language_model::init(user_store.clone(), client.clone(), cx); + language_model::init(cx); + RefreshLlmTokenListener::register(client.clone(), user_store.clone(), cx); language_models::init(user_store, client.clone(), cx); }); diff --git a/crates/agent/src/tests/edit_file_thread_test.rs b/crates/agent/src/tests/edit_file_thread_test.rs index 3beb5cb0d51abc55fbf3cf0849ced248a9d1fa5c..b5ce6441e790e0b79b2798dfe0008cc74eec69b8 100644 --- a/crates/agent/src/tests/edit_file_thread_test.rs +++ b/crates/agent/src/tests/edit_file_thread_test.rs @@ -202,3 +202,214 @@ async fn test_edit_file_tool_in_thread_context(cx: &mut TestAppContext) { ); }); } + +#[gpui::test] +async fn test_streaming_edit_json_parse_error_does_not_cause_unsaved_changes( + cx: &mut 
TestAppContext, +) { + super::init_test(cx); + super::always_allow_tools(cx); + + // Enable the streaming edit file tool feature flag. + cx.update(|cx| { + cx.update_flags(true, vec!["streaming-edit-file-tool".to_string()]); + }); + + let fs = FakeFs::new(cx.executor()); + fs.insert_tree( + path!("/project"), + json!({ + "src": { + "main.rs": "fn main() {\n println!(\"Hello, world!\");\n}\n" + } + }), + ) + .await; + + let project = project::Project::test(fs.clone(), [path!("/project").as_ref()], cx).await; + let project_context = cx.new(|_cx| ProjectContext::default()); + let context_server_store = project.read_with(cx, |project, _| project.context_server_store()); + let context_server_registry = + cx.new(|cx| crate::ContextServerRegistry::new(context_server_store.clone(), cx)); + let model = Arc::new(FakeLanguageModel::default()); + model.as_fake().set_supports_streaming_tools(true); + let fake_model = model.as_fake(); + + let thread = cx.new(|cx| { + let mut thread = crate::Thread::new( + project.clone(), + project_context, + context_server_registry, + crate::Templates::new(), + Some(model.clone()), + cx, + ); + let language_registry = project.read(cx).languages().clone(); + thread.add_tool(crate::StreamingEditFileTool::new( + project.clone(), + cx.weak_entity(), + thread.action_log().clone(), + language_registry, + )); + thread + }); + + let _events = thread + .update(cx, |thread, cx| { + thread.send( + UserMessageId::new(), + ["Write new content to src/main.rs"], + cx, + ) + }) + .unwrap(); + cx.run_until_parked(); + + let tool_use_id = "edit_1"; + let partial_1 = LanguageModelToolUse { + id: tool_use_id.into(), + name: EditFileTool::NAME.into(), + raw_input: json!({ + "display_description": "Rewrite main.rs", + "path": "project/src/main.rs", + "mode": "write" + }) + .to_string(), + input: json!({ + "display_description": "Rewrite main.rs", + "path": "project/src/main.rs", + "mode": "write" + }), + is_input_complete: false, + thought_signature: None, + }; + 
fake_model.send_last_completion_stream_event(LanguageModelCompletionEvent::ToolUse(partial_1)); + cx.run_until_parked(); + + let partial_2 = LanguageModelToolUse { + id: tool_use_id.into(), + name: EditFileTool::NAME.into(), + raw_input: json!({ + "display_description": "Rewrite main.rs", + "path": "project/src/main.rs", + "mode": "write", + "content": "fn main() { /* rewritten */ }" + }) + .to_string(), + input: json!({ + "display_description": "Rewrite main.rs", + "path": "project/src/main.rs", + "mode": "write", + "content": "fn main() { /* rewritten */ }" + }), + is_input_complete: false, + thought_signature: None, + }; + fake_model.send_last_completion_stream_event(LanguageModelCompletionEvent::ToolUse(partial_2)); + cx.run_until_parked(); + + // Now send a json parse error. At this point we have started writing content to the buffer. + fake_model.send_last_completion_stream_event( + LanguageModelCompletionEvent::ToolUseJsonParseError { + id: tool_use_id.into(), + tool_name: EditFileTool::NAME.into(), + raw_input: r#"{"display_description":"Rewrite main.rs","path":"project/src/main.rs","mode":"write","content":"fn main() { /* rewritten "#.into(), + json_parse_error: "EOF while parsing a string at line 1 column 95".into(), + }, + ); + fake_model + .send_last_completion_stream_event(LanguageModelCompletionEvent::Stop(StopReason::ToolUse)); + fake_model.end_last_completion_stream(); + cx.run_until_parked(); + + // cx.executor().advance_clock(Duration::from_secs(5)); + // cx.run_until_parked(); + + assert!( + !fake_model.pending_completions().is_empty(), + "Thread should have retried after the error" + ); + + // Respond with a new, well-formed, complete edit_file tool use. 
+ let tool_use = LanguageModelToolUse { + id: "edit_2".into(), + name: EditFileTool::NAME.into(), + raw_input: json!({ + "display_description": "Rewrite main.rs", + "path": "project/src/main.rs", + "mode": "write", + "content": "fn main() {\n println!(\"Hello, rewritten!\");\n}\n" + }) + .to_string(), + input: json!({ + "display_description": "Rewrite main.rs", + "path": "project/src/main.rs", + "mode": "write", + "content": "fn main() {\n println!(\"Hello, rewritten!\");\n}\n" + }), + is_input_complete: true, + thought_signature: None, + }; + fake_model.send_last_completion_stream_event(LanguageModelCompletionEvent::ToolUse(tool_use)); + fake_model + .send_last_completion_stream_event(LanguageModelCompletionEvent::Stop(StopReason::ToolUse)); + fake_model.end_last_completion_stream(); + cx.run_until_parked(); + + let pending_completions = fake_model.pending_completions(); + assert!( + pending_completions.len() == 1, + "Expected only the follow-up completion containing the successful tool result" + ); + + let completion = pending_completions + .into_iter() + .last() + .expect("Expected a completion containing the tool result for edit_2"); + + let tool_result = completion + .messages + .iter() + .flat_map(|msg| &msg.content) + .find_map(|content| match content { + language_model::MessageContent::ToolResult(result) + if result.tool_use_id == language_model::LanguageModelToolUseId::from("edit_2") => + { + Some(result) + } + _ => None, + }) + .expect("Should have a tool result for edit_2"); + + // Ensure that the second tool call completed successfully and edits were applied. 
+ assert!( + !tool_result.is_error, + "Tool result should succeed, got: {:?}", + tool_result + ); + let content_text = match &tool_result.content { + language_model::LanguageModelToolResultContent::Text(t) => t.to_string(), + other => panic!("Expected text content, got: {:?}", other), + }; + assert!( + !content_text.contains("file has been modified since you last read it"), + "Did not expect a stale last-read error, got: {content_text}" + ); + assert!( + !content_text.contains("This file has unsaved changes"), + "Did not expect an unsaved-changes error, got: {content_text}" + ); + + let file_content = fs + .load(path!("/project/src/main.rs").as_ref()) + .await + .expect("file should exist"); + super::assert_eq!( + file_content, + "fn main() {\n println!(\"Hello, rewritten!\");\n}\n", + "The second edit should be applied and saved gracefully" + ); + + fake_model.end_last_completion_stream(); + cx.run_until_parked(); +} diff --git a/crates/agent/src/tests/mod.rs b/crates/agent/src/tests/mod.rs index 036a6f1030c43b16d51f864a1d0176891e90b772..ff53136a0ded4bbc283fea30598d8d30e6e29709 100644 --- a/crates/agent/src/tests/mod.rs +++ b/crates/agent/src/tests/mod.rs @@ -6,7 +6,7 @@ use acp_thread::{ use agent_client_protocol::{self as acp}; use agent_settings::AgentProfileId; use anyhow::Result; -use client::{Client, UserStore}; +use client::{Client, RefreshLlmTokenListener, UserStore}; use collections::IndexMap; use context_server::{ContextServer, ContextServerCommand, ContextServerId}; use feature_flags::FeatureFlagAppExt as _; @@ -3253,7 +3253,8 @@ async fn test_agent_connection(cx: &mut TestAppContext) { let clock = Arc::new(clock::FakeSystemClock::new()); let client = Client::new(clock, http_client, cx); let user_store = cx.new(|cx| UserStore::new(client.clone(), cx)); - language_model::init(user_store.clone(), client.clone(), cx); + language_model::init(cx); + RefreshLlmTokenListener::register(client.clone(), user_store.clone(), cx); language_models::init(user_store, 
client.clone(), cx); LanguageModelRegistry::test(cx); }); @@ -3902,6 +3903,117 @@ async fn test_streaming_tool_completes_when_llm_stream_ends_without_final_input( }); } +#[gpui::test] +async fn test_streaming_tool_json_parse_error_is_forwarded_to_running_tool( + cx: &mut TestAppContext, +) { + init_test(cx); + always_allow_tools(cx); + + let ThreadTest { model, thread, .. } = setup(cx, TestModel::Fake).await; + let fake_model = model.as_fake(); + + thread.update(cx, |thread, _cx| { + thread.add_tool(StreamingJsonErrorContextTool); + }); + + let _events = thread + .update(cx, |thread, cx| { + thread.send( + UserMessageId::new(), + ["Use the streaming_json_error_context tool"], + cx, + ) + }) + .unwrap(); + cx.run_until_parked(); + + let tool_use = LanguageModelToolUse { + id: "tool_1".into(), + name: StreamingJsonErrorContextTool::NAME.into(), + raw_input: r#"{"text": "partial"#.into(), + input: json!({"text": "partial"}), + is_input_complete: false, + thought_signature: None, + }; + fake_model.send_last_completion_stream_event(LanguageModelCompletionEvent::ToolUse(tool_use)); + cx.run_until_parked(); + + fake_model.send_last_completion_stream_event( + LanguageModelCompletionEvent::ToolUseJsonParseError { + id: "tool_1".into(), + tool_name: StreamingJsonErrorContextTool::NAME.into(), + raw_input: r#"{"text": "partial"#.into(), + json_parse_error: "EOF while parsing a string at line 1 column 17".into(), + }, + ); + fake_model + .send_last_completion_stream_event(LanguageModelCompletionEvent::Stop(StopReason::ToolUse)); + fake_model.end_last_completion_stream(); + cx.run_until_parked(); + + cx.executor().advance_clock(Duration::from_secs(5)); + cx.run_until_parked(); + + let completion = fake_model + .pending_completions() + .pop() + .expect("No running turn"); + + let tool_results: Vec<_> = completion + .messages + .iter() + .flat_map(|message| &message.content) + .filter_map(|content| match content { + MessageContent::ToolResult(result) + if result.tool_use_id == 
language_model::LanguageModelToolUseId::from("tool_1") => + { + Some(result) + } + _ => None, + }) + .collect(); + + assert_eq!( + tool_results.len(), + 1, + "Expected exactly 1 tool result for tool_1, got {}: {:#?}", + tool_results.len(), + tool_results + ); + + let result = tool_results[0]; + assert!(result.is_error); + let content_text = match &result.content { + language_model::LanguageModelToolResultContent::Text(text) => text.to_string(), + other => panic!("Expected text content, got {:?}", other), + }; + assert!( + content_text.contains("Saw partial text 'partial' before invalid JSON"), + "Expected tool-enriched partial context, got: {content_text}" + ); + assert!( + content_text + .contains("Error parsing input JSON: EOF while parsing a string at line 1 column 17"), + "Expected forwarded JSON parse error, got: {content_text}" + ); + assert!( + !content_text.contains("tool input was not fully received"), + "Should not contain orphaned sender error, got: {content_text}" + ); + + fake_model.send_last_completion_stream_text_chunk("Done"); + fake_model.end_last_completion_stream(); + cx.run_until_parked(); + + thread.read_with(cx, |thread, _cx| { + assert!( + thread.is_turn_complete(), + "Thread should not be stuck; the turn should have completed", + ); + }); +} + /// Filters out the stop events for asserting against in tests fn stop_events(result_events: Vec>) -> Vec { result_events @@ -3958,6 +4070,7 @@ async fn setup(cx: &mut TestAppContext, model: TestModel) -> ThreadTest { InfiniteTool::NAME: true, CancellationAwareTool::NAME: true, StreamingEchoTool::NAME: true, + StreamingJsonErrorContextTool::NAME: true, StreamingFailingEchoTool::NAME: true, TerminalTool::NAME: true, UpdatePlanTool::NAME: true, @@ -3982,7 +4095,8 @@ async fn setup(cx: &mut TestAppContext, model: TestModel) -> ThreadTest { cx.set_http_client(Arc::new(http_client)); let client = Client::production(cx); let user_store = cx.new(|cx| UserStore::new(client.clone(), cx)); - 
language_model::init(user_store.clone(), client.clone(), cx); + language_model::init(cx); + RefreshLlmTokenListener::register(client.clone(), user_store.clone(), cx); language_models::init(user_store, client.clone(), cx); } }; @@ -6206,9 +6320,9 @@ async fn test_edit_file_tool_allow_rule_skips_confirmation(cx: &mut TestAppConte cx.run_until_parked(); - let event = rx.try_next(); + let event = rx.try_recv(); assert!( - !matches!(event, Ok(Some(Ok(ThreadEvent::ToolCallAuthorization(_))))), + !matches!(event, Ok(Ok(ThreadEvent::ToolCallAuthorization(_)))), "expected no authorization request for allowed .md file" ); } @@ -6350,9 +6464,9 @@ async fn test_fetch_tool_allow_rule_skips_confirmation(cx: &mut TestAppContext) cx.run_until_parked(); - let event = rx.try_next(); + let event = rx.try_recv(); assert!( - !matches!(event, Ok(Some(Ok(ThreadEvent::ToolCallAuthorization(_))))), + !matches!(event, Ok(Ok(ThreadEvent::ToolCallAuthorization(_)))), "expected no authorization request for allowed docs.rs URL" ); } diff --git a/crates/agent/src/tests/test_tools.rs b/crates/agent/src/tests/test_tools.rs index f36549a6c42f9e810c7794d8ec683613b6ae6933..4744204fae1213d49af92339b8847e9d1f470125 100644 --- a/crates/agent/src/tests/test_tools.rs +++ b/crates/agent/src/tests/test_tools.rs @@ -56,13 +56,12 @@ impl AgentTool for StreamingEchoTool { fn run( self: Arc, - mut input: ToolInput, + input: ToolInput, _event_stream: ToolCallEventStream, cx: &mut App, ) -> Task> { let wait_until_complete_rx = self.wait_until_complete_rx.lock().unwrap().take(); cx.spawn(async move |_cx| { - while input.recv_partial().await.is_some() {} let input = input .recv() .await @@ -75,6 +74,68 @@ impl AgentTool for StreamingEchoTool { } } +#[derive(JsonSchema, Serialize, Deserialize)] +pub struct StreamingJsonErrorContextToolInput { + /// The text to echo. 
+ pub text: String, +} + +pub struct StreamingJsonErrorContextTool; + +impl AgentTool for StreamingJsonErrorContextTool { + type Input = StreamingJsonErrorContextToolInput; + type Output = String; + + const NAME: &'static str = "streaming_json_error_context"; + + fn supports_input_streaming() -> bool { + true + } + + fn kind() -> acp::ToolKind { + acp::ToolKind::Other + } + + fn initial_title( + &self, + _input: Result, + _cx: &mut App, + ) -> SharedString { + "Streaming JSON Error Context".into() + } + + fn run( + self: Arc, + mut input: ToolInput, + _event_stream: ToolCallEventStream, + cx: &mut App, + ) -> Task> { + cx.spawn(async move |_cx| { + let mut last_partial_text = None; + + loop { + match input.next().await { + Ok(ToolInputPayload::Partial(partial)) => { + if let Some(text) = partial.get("text").and_then(|value| value.as_str()) { + last_partial_text = Some(text.to_string()); + } + } + Ok(ToolInputPayload::Full(input)) => return Ok(input.text), + Ok(ToolInputPayload::InvalidJson { error_message }) => { + let partial_text = last_partial_text.unwrap_or_default(); + return Err(format!( + "Saw partial text '{partial_text}' before invalid JSON: {error_message}" + )); + } + Err(error) => { + return Err(format!("Failed to receive tool input: {error}")); + } + } + } + }) + } +} + /// A streaming tool that echoes its input, used to test streaming tool /// lifecycle (e.g. partial delivery and cleanup when the LLM stream ends /// before `is_input_complete`). 
@@ -119,7 +180,7 @@ impl AgentTool for StreamingFailingEchoTool { ) -> Task> { cx.spawn(async move |_cx| { for _ in 0..self.receive_chunks_until_failure { - let _ = input.recv_partial().await; + let _ = input.next().await; } Err("failed".into()) }) diff --git a/crates/agent/src/thread.rs b/crates/agent/src/thread.rs index b61df1b8af84d312d7f186fb85e5a1d04ab59dfd..ea342e8db4e4d97d5eccc849121cd0fd2e403017 100644 --- a/crates/agent/src/thread.rs +++ b/crates/agent/src/thread.rs @@ -22,13 +22,13 @@ use client::UserStore; use cloud_api_types::Plan; use collections::{HashMap, HashSet, IndexMap}; use fs::Fs; -use futures::stream; use futures::{ FutureExt, channel::{mpsc, oneshot}, future::Shared, stream::FuturesUnordered, }; +use futures::{StreamExt, stream}; use gpui::{ App, AppContext, AsyncApp, Context, Entity, EventEmitter, SharedString, Task, WeakEntity, }; @@ -47,7 +47,6 @@ use schemars::{JsonSchema, Schema}; use serde::de::DeserializeOwned; use serde::{Deserialize, Serialize}; use settings::{LanguageModelSelection, Settings, ToolPermissionMode, update_settings_file}; -use smol::stream::StreamExt; use std::{ collections::BTreeMap, marker::PhantomData, @@ -253,7 +252,7 @@ impl UserMessage { ) .ok(); } - MentionUri::PastedImage => { + MentionUri::PastedImage { .. } => { debug_panic!("pasted image URI should not be used in mention content") } MentionUri::Directory { .. 
} => { @@ -2095,7 +2094,7 @@ impl Thread { this.update(cx, |this, _cx| { this.pending_message() .tool_results - .insert(tool_result.tool_use_id.clone(), tool_result); + .insert(tool_result.tool_use_id.clone(), tool_result) })?; Ok(()) } @@ -2195,15 +2194,15 @@ impl Thread { raw_input, json_parse_error, } => { - return Ok(Some(Task::ready( - self.handle_tool_use_json_parse_error_event( - id, - tool_name, - raw_input, - json_parse_error, - event_stream, - ), - ))); + return Ok(self.handle_tool_use_json_parse_error_event( + id, + tool_name, + raw_input, + json_parse_error, + event_stream, + cancellation_rx, + cx, + )); } UsageUpdate(usage) => { telemetry::event!( @@ -2304,12 +2303,12 @@ impl Thread { if !tool_use.is_input_complete { if tool.supports_input_streaming() { let running_turn = self.running_turn.as_mut()?; - if let Some(sender) = running_turn.streaming_tool_inputs.get(&tool_use.id) { + if let Some(sender) = running_turn.streaming_tool_inputs.get_mut(&tool_use.id) { sender.send_partial(tool_use.input); return None; } - let (sender, tool_input) = ToolInputSender::channel(); + let (mut sender, tool_input) = ToolInputSender::channel(); sender.send_partial(tool_use.input); running_turn .streaming_tool_inputs @@ -2331,13 +2330,13 @@ impl Thread { } } - if let Some(sender) = self + if let Some(mut sender) = self .running_turn .as_mut()? 
.streaming_tool_inputs .remove(&tool_use.id) { - sender.send_final(tool_use.input); + sender.send_full(tool_use.input); return None; } @@ -2410,10 +2409,12 @@ impl Thread { raw_input: Arc, json_parse_error: String, event_stream: &ThreadEventStream, - ) -> LanguageModelToolResult { + cancellation_rx: watch::Receiver, + cx: &mut Context, + ) -> Option> { let tool_use = LanguageModelToolUse { - id: tool_use_id.clone(), - name: tool_name.clone(), + id: tool_use_id, + name: tool_name, raw_input: raw_input.to_string(), input: serde_json::json!({}), is_input_complete: true, @@ -2426,14 +2427,43 @@ impl Thread { event_stream, ); - let tool_output = format!("Error parsing input JSON: {json_parse_error}"); - LanguageModelToolResult { - tool_use_id, - tool_name, - is_error: true, - content: LanguageModelToolResultContent::Text(tool_output.into()), - output: Some(serde_json::Value::String(raw_input.to_string())), + let tool = self.tool(tool_use.name.as_ref()); + + let Some(tool) = tool else { + let content = format!("No tool named {} exists", tool_use.name); + return Some(Task::ready(LanguageModelToolResult { + content: LanguageModelToolResultContent::Text(Arc::from(content)), + tool_use_id: tool_use.id, + tool_name: tool_use.name, + is_error: true, + output: None, + })); + }; + + let error_message = format!("Error parsing input JSON: {json_parse_error}"); + + if tool.supports_input_streaming() + && let Some(mut sender) = self + .running_turn + .as_mut()? + .streaming_tool_inputs + .remove(&tool_use.id) + { + sender.send_invalid_json(error_message); + return None; } + + log::debug!("Running tool {}. 
Received invalid JSON", tool_use.name); + let tool_input = ToolInput::invalid_json(error_message); + Some(self.run_tool( + tool, + tool_input, + tool_use.id, + tool_use.name, + event_stream, + cancellation_rx, + cx, + )) } fn send_or_update_tool_use( @@ -3114,8 +3144,7 @@ impl EventEmitter for Thread {} /// For streaming tools, partial JSON snapshots arrive via `.recv_partial()` as the LLM streams /// them, followed by the final complete input available through `.recv()`. pub struct ToolInput { - partial_rx: mpsc::UnboundedReceiver, - final_rx: oneshot::Receiver, + rx: mpsc::UnboundedReceiver>, _phantom: PhantomData, } @@ -3127,13 +3156,20 @@ impl ToolInput { } pub fn ready(value: serde_json::Value) -> Self { - let (partial_tx, partial_rx) = mpsc::unbounded(); - drop(partial_tx); - let (final_tx, final_rx) = oneshot::channel(); - final_tx.send(value).ok(); + let (tx, rx) = mpsc::unbounded(); + tx.unbounded_send(ToolInputPayload::Full(value)).ok(); Self { - partial_rx, - final_rx, + rx, + _phantom: PhantomData, + } + } + + pub fn invalid_json(error_message: String) -> Self { + let (tx, rx) = mpsc::unbounded(); + tx.unbounded_send(ToolInputPayload::InvalidJson { error_message }) + .ok(); + Self { + rx, _phantom: PhantomData, } } @@ -3147,65 +3183,89 @@ impl ToolInput { /// Wait for the final deserialized input, ignoring all partial updates. /// Non-streaming tools can use this to wait until the whole input is available. 
pub async fn recv(mut self) -> Result { - // Drain any remaining partials - while self.partial_rx.next().await.is_some() {} + while let Ok(value) = self.next().await { + match value { + ToolInputPayload::Full(value) => return Ok(value), + ToolInputPayload::Partial(_) => {} + ToolInputPayload::InvalidJson { error_message } => { + return Err(anyhow!(error_message)); + } + } + } + Err(anyhow!("tool input was not fully received")) + } + + pub async fn next(&mut self) -> Result> { let value = self - .final_rx + .rx + .next() .await - .map_err(|_| anyhow!("tool input was not fully received"))?; - serde_json::from_value(value).map_err(Into::into) - } + .ok_or_else(|| anyhow!("tool input was not fully received"))?; - /// Returns the next partial JSON snapshot, or `None` when input is complete. - /// Once this returns `None`, call `recv()` to get the final input. - pub async fn recv_partial(&mut self) -> Option { - self.partial_rx.next().await + Ok(match value { + ToolInputPayload::Partial(payload) => ToolInputPayload::Partial(payload), + ToolInputPayload::Full(payload) => { + ToolInputPayload::Full(serde_json::from_value(payload)?) 
+ } + ToolInputPayload::InvalidJson { error_message } => { + ToolInputPayload::InvalidJson { error_message } + } + }) } fn cast(self) -> ToolInput { ToolInput { - partial_rx: self.partial_rx, - final_rx: self.final_rx, + rx: self.rx, _phantom: PhantomData, } } } +pub enum ToolInputPayload { + Partial(serde_json::Value), + Full(T), + InvalidJson { error_message: String }, +} + pub struct ToolInputSender { - partial_tx: mpsc::UnboundedSender, - final_tx: Option>, + has_received_final: bool, + tx: mpsc::UnboundedSender>, } impl ToolInputSender { pub(crate) fn channel() -> (Self, ToolInput) { - let (partial_tx, partial_rx) = mpsc::unbounded(); - let (final_tx, final_rx) = oneshot::channel(); + let (tx, rx) = mpsc::unbounded(); let sender = Self { - partial_tx, - final_tx: Some(final_tx), + tx, + has_received_final: false, }; let input = ToolInput { - partial_rx, - final_rx, + rx, _phantom: PhantomData, }; (sender, input) } pub(crate) fn has_received_final(&self) -> bool { - self.final_tx.is_none() + self.has_received_final } - pub(crate) fn send_partial(&self, value: serde_json::Value) { - self.partial_tx.unbounded_send(value).ok(); + pub fn send_partial(&mut self, payload: serde_json::Value) { + self.tx + .unbounded_send(ToolInputPayload::Partial(payload)) + .ok(); } - pub(crate) fn send_final(mut self, value: serde_json::Value) { - // Close the partial channel so recv_partial() returns None - self.partial_tx.close_channel(); - if let Some(final_tx) = self.final_tx.take() { - final_tx.send(value).ok(); - } + pub fn send_full(&mut self, payload: serde_json::Value) { + self.has_received_final = true; + self.tx.unbounded_send(ToolInputPayload::Full(payload)).ok(); + } + + pub fn send_invalid_json(&mut self, error_message: String) { + self.has_received_final = true; + self.tx + .unbounded_send(ToolInputPayload::InvalidJson { error_message }) + .ok(); } } @@ -4251,68 +4311,78 @@ mod tests { ) { let (thread, event_stream) = setup_thread_for_test(cx).await; - cx.update(|cx| 
{ - thread.update(cx, |thread, _cx| { - let tool_use_id = LanguageModelToolUseId::from("test_tool_id"); - let tool_name: Arc = Arc::from("test_tool"); - let raw_input: Arc = Arc::from("{invalid json"); - let json_parse_error = "expected value at line 1 column 1".to_string(); - - // Call the function under test - let result = thread.handle_tool_use_json_parse_error_event( - tool_use_id.clone(), - tool_name.clone(), - raw_input.clone(), - json_parse_error, - &event_stream, - ); - - // Verify the result is an error - assert!(result.is_error); - assert_eq!(result.tool_use_id, tool_use_id); - assert_eq!(result.tool_name, tool_name); - assert!(matches!( - result.content, - LanguageModelToolResultContent::Text(_) - )); - - // Verify the tool use was added to the message content - { - let last_message = thread.pending_message(); - assert_eq!( - last_message.content.len(), - 1, - "Should have one tool_use in content" - ); - - match &last_message.content[0] { - AgentMessageContent::ToolUse(tool_use) => { - assert_eq!(tool_use.id, tool_use_id); - assert_eq!(tool_use.name, tool_name); - assert_eq!(tool_use.raw_input, raw_input.to_string()); - assert!(tool_use.is_input_complete); - // Should fall back to empty object for invalid JSON - assert_eq!(tool_use.input, json!({})); - } - _ => panic!("Expected ToolUse content"), - } - } - - // Insert the tool result (simulating what the caller does) - thread - .pending_message() - .tool_results - .insert(result.tool_use_id.clone(), result); + let tool_use_id = LanguageModelToolUseId::from("test_tool_id"); + let tool_name: Arc = Arc::from("test_tool"); + let raw_input: Arc = Arc::from("{invalid json"); + let json_parse_error = "expected value at line 1 column 1".to_string(); + + let (_cancellation_tx, cancellation_rx) = watch::channel(false); + + let result = cx + .update(|cx| { + thread.update(cx, |thread, cx| { + // Call the function under test + thread + .handle_tool_use_json_parse_error_event( + tool_use_id.clone(), + 
tool_name.clone(), + raw_input.clone(), + json_parse_error, + &event_stream, + cancellation_rx, + cx, + ) + .unwrap() + }) + }) + .await; + + // Verify the result is an error + assert!(result.is_error); + assert_eq!(result.tool_use_id, tool_use_id); + assert_eq!(result.tool_name, tool_name); + assert!(matches!( + result.content, + LanguageModelToolResultContent::Text(_) + )); - // Verify the tool result was added + thread.update(cx, |thread, _cx| { + // Verify the tool use was added to the message content + { let last_message = thread.pending_message(); assert_eq!( - last_message.tool_results.len(), + last_message.content.len(), 1, - "Should have one tool_result" + "Should have one tool_use in content" ); - assert!(last_message.tool_results.contains_key(&tool_use_id)); - }); - }); + + match &last_message.content[0] { + AgentMessageContent::ToolUse(tool_use) => { + assert_eq!(tool_use.id, tool_use_id); + assert_eq!(tool_use.name, tool_name); + assert_eq!(tool_use.raw_input, raw_input.to_string()); + assert!(tool_use.is_input_complete); + // Should fall back to empty object for invalid JSON + assert_eq!(tool_use.input, json!({})); + } + _ => panic!("Expected ToolUse content"), + } + } + + // Insert the tool result (simulating what the caller does) + thread + .pending_message() + .tool_results + .insert(result.tool_use_id.clone(), result); + + // Verify the tool result was added + let last_message = thread.pending_message(); + assert_eq!( + last_message.tool_results.len(), + 1, + "Should have one tool_result" + ); + assert!(last_message.tool_results.contains_key(&tool_use_id)); + }) } } diff --git a/crates/agent/src/tool_permissions.rs b/crates/agent/src/tool_permissions.rs index e74b6e4c5ce34383ad7ea702f1ba3a0cfd028455..58e779da59aef176464839ed6f2d6a5c16e4bc12 100644 --- a/crates/agent/src/tool_permissions.rs +++ b/crates/agent/src/tool_permissions.rs @@ -563,7 +563,7 @@ mod tests { use crate::tools::{DeletePathTool, EditFileTool, FetchTool, TerminalTool}; use 
agent_settings::{AgentProfileId, CompiledRegex, InvalidRegexPattern, ToolRules}; use gpui::px; - use settings::{DockPosition, NotifyWhenAgentWaiting}; + use settings::{DockPosition, NotifyWhenAgentWaiting, PlaySoundWhenAgentDone}; use std::sync::Arc; fn test_agent_settings(tool_permissions: ToolPermissions) -> AgentSettings { @@ -584,7 +584,7 @@ mod tests { default_profile: AgentProfileId::default(), profiles: Default::default(), notify_when_agent_waiting: NotifyWhenAgentWaiting::default(), - play_sound_when_agent_done: false, + play_sound_when_agent_done: PlaySoundWhenAgentDone::default(), single_file_review: false, model_parameters: vec![], enable_feedback: false, @@ -595,6 +595,7 @@ mod tests { message_editor_min_lines: 1, tool_permissions, show_turn_stats: false, + show_merge_conflict_indicator: true, new_thread_location: Default::default(), sidebar_side: Default::default(), thinking_display: Default::default(), diff --git a/crates/agent/src/tools/copy_path_tool.rs b/crates/agent/src/tools/copy_path_tool.rs index 95688f27dcd8ca04aef72358ce52144f95138e17..06600f64874851c8d703513ea006d7f0327a0952 100644 --- a/crates/agent/src/tools/copy_path_tool.rs +++ b/crates/agent/src/tools/copy_path_tool.rs @@ -383,8 +383,8 @@ mod tests { assert!( !matches!( - event_rx.try_next(), - Ok(Some(Ok(crate::ThreadEvent::ToolCallAuthorization(_)))) + event_rx.try_recv(), + Ok(Ok(crate::ThreadEvent::ToolCallAuthorization(_))) ), "Expected a single authorization prompt", ); @@ -450,8 +450,8 @@ mod tests { assert!(result.is_err(), "Tool should fail when policy denies"); assert!( !matches!( - event_rx.try_next(), - Ok(Some(Ok(crate::ThreadEvent::ToolCallAuthorization(_)))) + event_rx.try_recv(), + Ok(Ok(crate::ThreadEvent::ToolCallAuthorization(_))) ), "Deny policy should not emit symlink authorization prompt", ); diff --git a/crates/agent/src/tools/create_directory_tool.rs b/crates/agent/src/tools/create_directory_tool.rs index 
d6c59bcce30ab26991edba0fa7181ec45d10e1b0..60bb44e39ee5ab76168d909c08889cbbbc63f9f4 100644 --- a/crates/agent/src/tools/create_directory_tool.rs +++ b/crates/agent/src/tools/create_directory_tool.rs @@ -370,8 +370,8 @@ mod tests { assert!( !matches!( - event_rx.try_next(), - Ok(Some(Ok(crate::ThreadEvent::ToolCallAuthorization(_)))) + event_rx.try_recv(), + Ok(Ok(crate::ThreadEvent::ToolCallAuthorization(_))) ), "Expected a single authorization prompt", ); @@ -440,8 +440,8 @@ mod tests { assert!(result.is_err(), "Tool should fail when policy denies"); assert!( !matches!( - event_rx.try_next(), - Ok(Some(Ok(crate::ThreadEvent::ToolCallAuthorization(_)))) + event_rx.try_recv(), + Ok(Ok(crate::ThreadEvent::ToolCallAuthorization(_))) ), "Deny policy should not emit symlink authorization prompt", ); diff --git a/crates/agent/src/tools/delete_path_tool.rs b/crates/agent/src/tools/delete_path_tool.rs index 7433975c7b782a145dd3e5a80ee59cd92945a989..21b4674425d9169e7740dd35c929302814006684 100644 --- a/crates/agent/src/tools/delete_path_tool.rs +++ b/crates/agent/src/tools/delete_path_tool.rs @@ -439,8 +439,8 @@ mod tests { assert!( !matches!( - event_rx.try_next(), - Ok(Some(Ok(crate::ThreadEvent::ToolCallAuthorization(_)))) + event_rx.try_recv(), + Ok(Ok(crate::ThreadEvent::ToolCallAuthorization(_))) ), "Expected a single authorization prompt", ); @@ -513,8 +513,8 @@ mod tests { assert!(result.is_err(), "Tool should fail when policy denies"); assert!( !matches!( - event_rx.try_next(), - Ok(Some(Ok(crate::ThreadEvent::ToolCallAuthorization(_)))) + event_rx.try_recv(), + Ok(Ok(crate::ThreadEvent::ToolCallAuthorization(_))) ), "Deny policy should not emit symlink authorization prompt", ); diff --git a/crates/agent/src/tools/edit_file_tool.rs b/crates/agent/src/tools/edit_file_tool.rs index 763efd6724a719b90af93843f203ef8c1c3976bb..9bcf164096b99675febd3d7ae1bde8341f7c5ff8 100644 --- a/crates/agent/src/tools/edit_file_tool.rs +++ b/crates/agent/src/tools/edit_file_tool.rs @@ 
-1188,7 +1188,7 @@ mod tests { }) .await .unwrap(); - assert!(stream_rx.try_next().is_err()); + assert!(stream_rx.try_recv().is_err()); // Test 4: Path with .zed in the middle should require confirmation let (stream_tx, mut stream_rx) = ToolCallEventStream::test(); @@ -1251,7 +1251,7 @@ mod tests { }) .await .unwrap(); - assert!(stream_rx.try_next().is_err()); + assert!(stream_rx.try_recv().is_err()); // 5.3: Normal in-project path with allow — no confirmation needed let (stream_tx, mut stream_rx) = ToolCallEventStream::test(); @@ -1268,7 +1268,7 @@ mod tests { }) .await .unwrap(); - assert!(stream_rx.try_next().is_err()); + assert!(stream_rx.try_recv().is_err()); // 5.4: With Confirm default, non-project paths still prompt cx.update(|cx| { @@ -1586,8 +1586,8 @@ mod tests { assert!(result.is_err(), "Tool should fail when policy denies"); assert!( !matches!( - stream_rx.try_next(), - Ok(Some(Ok(crate::ThreadEvent::ToolCallAuthorization(_)))) + stream_rx.try_recv(), + Ok(Ok(crate::ThreadEvent::ToolCallAuthorization(_))) ), "Deny policy should not emit symlink authorization prompt", ); @@ -1658,7 +1658,7 @@ mod tests { } else { auth.await.unwrap(); assert!( - stream_rx.try_next().is_err(), + stream_rx.try_recv().is_err(), "Failed for case: {} - path: {} - expected no confirmation but got one", description, path @@ -1769,7 +1769,7 @@ mod tests { } else { auth.await.unwrap(); assert!( - stream_rx.try_next().is_err(), + stream_rx.try_recv().is_err(), "Failed for case: {} - path: {} - expected no confirmation but got one", description, path @@ -1862,7 +1862,7 @@ mod tests { stream_rx.expect_authorization().await; } else { assert!( - stream_rx.try_next().is_err(), + stream_rx.try_recv().is_err(), "Failed for case: {} - path: {} - expected no confirmation but got one", description, path @@ -1963,7 +1963,7 @@ mod tests { }) .await .unwrap(); - assert!(stream_rx.try_next().is_err()); + assert!(stream_rx.try_recv().is_err()); } } diff --git 
a/crates/agent/src/tools/evals/streaming_edit_file.rs b/crates/agent/src/tools/evals/streaming_edit_file.rs index 6a55517037e54ae4166cd22427201d9325ef0f76..0c6290ec098f9c37a0f6a077daf0a041c013d8ff 100644 --- a/crates/agent/src/tools/evals/streaming_edit_file.rs +++ b/crates/agent/src/tools/evals/streaming_edit_file.rs @@ -6,7 +6,7 @@ use crate::{ }; use Role::*; use anyhow::{Context as _, Result}; -use client::{Client, UserStore}; +use client::{Client, RefreshLlmTokenListener, UserStore}; use fs::FakeFs; use futures::{FutureExt, StreamExt, future::LocalBoxFuture}; use gpui::{AppContext as _, AsyncApp, Entity, TestAppContext, UpdateGlobal as _}; @@ -274,7 +274,8 @@ impl StreamingEditToolTest { cx.set_http_client(http_client); let client = Client::production(cx); let user_store = cx.new(|cx| UserStore::new(client.clone(), cx)); - language_model::init(user_store.clone(), client.clone(), cx); + language_model::init(cx); + RefreshLlmTokenListener::register(client.clone(), user_store.clone(), cx); language_models::init(user_store, client, cx); }); diff --git a/crates/agent/src/tools/list_directory_tool.rs b/crates/agent/src/tools/list_directory_tool.rs index 7abbe1ed4c488210b9079e59765dddc8d5208bed..c88492bba40ee4fdfa928f153e49a302ad60be8b 100644 --- a/crates/agent/src/tools/list_directory_tool.rs +++ b/crates/agent/src/tools/list_directory_tool.rs @@ -982,13 +982,11 @@ mod tests { "Expected private path validation error, got: {error}" ); - let event = event_rx.try_next(); + let event = event_rx.try_recv(); assert!( !matches!( event, - Ok(Some(Ok(crate::thread::ThreadEvent::ToolCallAuthorization( - _ - )))) + Ok(Ok(crate::thread::ThreadEvent::ToolCallAuthorization(_))) ), "No authorization should be requested when validation fails before listing", ); @@ -1030,13 +1028,11 @@ mod tests { "Normal path should succeed without authorization" ); - let event = event_rx.try_next(); + let event = event_rx.try_recv(); assert!( !matches!( event, - 
Ok(Some(Ok(crate::thread::ThreadEvent::ToolCallAuthorization( - _ - )))) + Ok(Ok(crate::thread::ThreadEvent::ToolCallAuthorization(_))) ), "No authorization should be requested for normal paths", ); @@ -1087,13 +1083,11 @@ mod tests { "Intra-project symlink should succeed without authorization: {result:?}", ); - let event = event_rx.try_next(); + let event = event_rx.try_recv(); assert!( !matches!( event, - Ok(Some(Ok(crate::thread::ThreadEvent::ToolCallAuthorization( - _ - )))) + Ok(Ok(crate::thread::ThreadEvent::ToolCallAuthorization(_))) ), "No authorization should be requested for intra-project symlinks", ); diff --git a/crates/agent/src/tools/move_path_tool.rs b/crates/agent/src/tools/move_path_tool.rs index 147947bb67ec646c38b51f37dd75779ed78ec85b..eaea204d84d96ab841f2e075a42a1a42b827374d 100644 --- a/crates/agent/src/tools/move_path_tool.rs +++ b/crates/agent/src/tools/move_path_tool.rs @@ -390,8 +390,8 @@ mod tests { assert!( !matches!( - event_rx.try_next(), - Ok(Some(Ok(crate::ThreadEvent::ToolCallAuthorization(_)))) + event_rx.try_recv(), + Ok(Ok(crate::ThreadEvent::ToolCallAuthorization(_))) ), "Expected a single authorization prompt", ); @@ -457,8 +457,8 @@ mod tests { assert!(result.is_err(), "Tool should fail when policy denies"); assert!( !matches!( - event_rx.try_next(), - Ok(Some(Ok(crate::ThreadEvent::ToolCallAuthorization(_)))) + event_rx.try_recv(), + Ok(Ok(crate::ThreadEvent::ToolCallAuthorization(_))) ), "Deny policy should not emit symlink authorization prompt", ); diff --git a/crates/agent/src/tools/read_file_tool.rs b/crates/agent/src/tools/read_file_tool.rs index 093a8580892cfc4cec0a061bcc10717b28c608f2..0086a82f4e79c9924502202873ceb2b25d2e66fb 100644 --- a/crates/agent/src/tools/read_file_tool.rs +++ b/crates/agent/src/tools/read_file_tool.rs @@ -1317,13 +1317,11 @@ mod test { "Expected private-files validation error, got: {error}" ); - let event = event_rx.try_next(); + let event = event_rx.try_recv(); assert!( !matches!( event, - 
Ok(Some(Ok(crate::thread::ThreadEvent::ToolCallAuthorization( - _ - )))) + Ok(Ok(crate::thread::ThreadEvent::ToolCallAuthorization(_))) ), "No authorization should be requested when validation fails before read", ); diff --git a/crates/agent/src/tools/restore_file_from_disk_tool.rs b/crates/agent/src/tools/restore_file_from_disk_tool.rs index 9273ea5b8bb041e0ea53f3ea72b94b46e5a7e294..b808a966cf983c92a5e93c19599ff5333ed70860 100644 --- a/crates/agent/src/tools/restore_file_from_disk_tool.rs +++ b/crates/agent/src/tools/restore_file_from_disk_tool.rs @@ -589,8 +589,8 @@ mod tests { assert!(result.is_err(), "Tool should fail when policy denies"); assert!( !matches!( - event_rx.try_next(), - Ok(Some(Ok(crate::ThreadEvent::ToolCallAuthorization(_)))) + event_rx.try_recv(), + Ok(Ok(crate::ThreadEvent::ToolCallAuthorization(_))) ), "Deny policy should not emit symlink authorization prompt", ); @@ -662,8 +662,8 @@ mod tests { assert!( !matches!( - event_rx.try_next(), - Ok(Some(Ok(crate::ThreadEvent::ToolCallAuthorization(_)))) + event_rx.try_recv(), + Ok(Ok(crate::ThreadEvent::ToolCallAuthorization(_))) ), "Expected a single authorization prompt", ); diff --git a/crates/agent/src/tools/save_file_tool.rs b/crates/agent/src/tools/save_file_tool.rs index c6a1cd79db65127164fe66f966029b58a366da7f..0cf9666a415f8174e9036ebadf8368589294c885 100644 --- a/crates/agent/src/tools/save_file_tool.rs +++ b/crates/agent/src/tools/save_file_tool.rs @@ -584,8 +584,8 @@ mod tests { assert!(result.is_err(), "Tool should fail when policy denies"); assert!( !matches!( - event_rx.try_next(), - Ok(Some(Ok(crate::ThreadEvent::ToolCallAuthorization(_)))) + event_rx.try_recv(), + Ok(Ok(crate::ThreadEvent::ToolCallAuthorization(_))) ), "Deny policy should not emit symlink authorization prompt", ); @@ -657,8 +657,8 @@ mod tests { assert!( !matches!( - event_rx.try_next(), - Ok(Some(Ok(crate::ThreadEvent::ToolCallAuthorization(_)))) + event_rx.try_recv(), + 
Ok(Ok(crate::ThreadEvent::ToolCallAuthorization(_))) ), "Expected a single authorization prompt", ); diff --git a/crates/agent/src/tools/streaming_edit_file_tool.rs b/crates/agent/src/tools/streaming_edit_file_tool.rs index 88ec1e67787ad6efbeaa46b83b9034a24b10d3db..47da35bbf25ad188f3f6b98e843b2955910bb7ac 100644 --- a/crates/agent/src/tools/streaming_edit_file_tool.rs +++ b/crates/agent/src/tools/streaming_edit_file_tool.rs @@ -2,6 +2,7 @@ use super::edit_file_tool::EditFileTool; use super::restore_file_from_disk_tool::RestoreFileFromDiskTool; use super::save_file_tool::SaveFileTool; use super::tool_edit_parser::{ToolEditEvent, ToolEditParser}; +use crate::ToolInputPayload; use crate::{ AgentTool, Thread, ToolCallEventStream, ToolInput, edit_agent::{ @@ -12,7 +13,7 @@ use crate::{ use acp_thread::Diff; use action_log::ActionLog; use agent_client_protocol::{self as acp, ToolCallLocation, ToolCallUpdateFields}; -use anyhow::{Context as _, Result}; +use anyhow::Result; use collections::HashSet; use futures::FutureExt as _; use gpui::{App, AppContext, AsyncApp, Entity, Task, WeakEntity}; @@ -188,6 +189,10 @@ pub enum StreamingEditFileToolOutput { }, Error { error: String, + #[serde(default)] + input_path: Option, + #[serde(default)] + diff: String, }, } @@ -195,6 +200,8 @@ impl StreamingEditFileToolOutput { pub fn error(error: impl Into) -> Self { Self::Error { error: error.into(), + input_path: None, + diff: String::new(), } } } @@ -215,7 +222,24 @@ impl std::fmt::Display for StreamingEditFileToolOutput { ) } } - StreamingEditFileToolOutput::Error { error } => write!(f, "{error}"), + StreamingEditFileToolOutput::Error { + error, + diff, + input_path, + } => { + write!(f, "{error}\n")?; + if let Some(input_path) = input_path + && !diff.is_empty() + { + write!( + f, + "Edited {}:\n\n```diff\n{diff}\n```", + input_path.display() + ) + } else { + write!(f, "No edits were made.") + } + } } } } @@ -233,6 +257,14 @@ pub struct StreamingEditFileTool { language_registry: Arc, 
} +enum EditSessionResult { + Completed(EditSession), + Failed { + error: String, + session: Option, + }, +} + impl StreamingEditFileTool { pub fn new( project: Entity, @@ -276,6 +308,158 @@ impl StreamingEditFileTool { }); } } + + async fn ensure_buffer_saved(&self, buffer: &Entity, cx: &mut AsyncApp) { + let format_on_save_enabled = buffer.read_with(cx, |buffer, cx| { + let settings = language_settings::LanguageSettings::for_buffer(buffer, cx); + settings.format_on_save != FormatOnSave::Off + }); + + if format_on_save_enabled { + self.project + .update(cx, |project, cx| { + project.format( + HashSet::from_iter([buffer.clone()]), + LspFormatTarget::Buffers, + false, + FormatTrigger::Save, + cx, + ) + }) + .await + .log_err(); + } + + self.project + .update(cx, |project, cx| project.save_buffer(buffer.clone(), cx)) + .await + .log_err(); + + self.action_log.update(cx, |log, cx| { + log.buffer_edited(buffer.clone(), cx); + }); + } + + async fn process_streaming_edits( + &self, + input: &mut ToolInput, + event_stream: &ToolCallEventStream, + cx: &mut AsyncApp, + ) -> EditSessionResult { + let mut session: Option = None; + let mut last_partial: Option = None; + + loop { + futures::select! { + payload = input.next().fuse() => { + match payload { + Ok(payload) => match payload { + ToolInputPayload::Partial(partial) => { + if let Ok(parsed) = serde_json::from_value::(partial) { + let path_complete = parsed.path.is_some() + && parsed.path.as_ref() == last_partial.as_ref().and_then(|partial| partial.path.as_ref()); + + last_partial = Some(parsed.clone()); + + if session.is_none() + && path_complete + && let StreamingEditFileToolPartialInput { + path: Some(path), + display_description: Some(display_description), + mode: Some(mode), + .. 
+ } = &parsed + { + match EditSession::new( + PathBuf::from(path), + display_description, + *mode, + self, + event_stream, + cx, + ) + .await + { + Ok(created_session) => session = Some(created_session), + Err(error) => { + log::error!("Failed to create edit session: {}", error); + return EditSessionResult::Failed { + error, + session: None, + }; + } + } + } + + if let Some(current_session) = &mut session + && let Err(error) = current_session.process(parsed, self, event_stream, cx) + { + log::error!("Failed to process edit: {}", error); + return EditSessionResult::Failed { error, session }; + } + } + } + ToolInputPayload::Full(full_input) => { + let mut session = if let Some(session) = session { + session + } else { + match EditSession::new( + full_input.path.clone(), + &full_input.display_description, + full_input.mode, + self, + event_stream, + cx, + ) + .await + { + Ok(created_session) => created_session, + Err(error) => { + log::error!("Failed to create edit session: {}", error); + return EditSessionResult::Failed { + error, + session: None, + }; + } + } + }; + + return match session.finalize(full_input, self, event_stream, cx).await { + Ok(()) => EditSessionResult::Completed(session), + Err(error) => { + log::error!("Failed to finalize edit: {}", error); + EditSessionResult::Failed { + error, + session: Some(session), + } + } + }; + } + ToolInputPayload::InvalidJson { error_message } => { + log::error!("Received invalid JSON: {error_message}"); + return EditSessionResult::Failed { + error: error_message, + session, + }; + } + }, + Err(error) => { + return EditSessionResult::Failed { + error: format!("Failed to receive tool input: {error}"), + session, + }; + } + } + } + _ = event_stream.cancelled_by_user().fuse() => { + return EditSessionResult::Failed { + error: "Edit cancelled by user".to_string(), + session, + }; + } + } + } + } } impl AgentTool for StreamingEditFileTool { @@ -348,94 +532,40 @@ impl AgentTool for StreamingEditFileTool { cx: &mut App, ) -> 
Task> { cx.spawn(async move |cx: &mut AsyncApp| { - let mut state: Option = None; - let mut last_partial: Option = None; - loop { - futures::select! { - partial = input.recv_partial().fuse() => { - let Some(partial_value) = partial else { break }; - if let Ok(parsed) = serde_json::from_value::(partial_value) { - let path_complete = parsed.path.is_some() - && parsed.path.as_ref() == last_partial.as_ref().and_then(|p| p.path.as_ref()); - - last_partial = Some(parsed.clone()); - - if state.is_none() - && path_complete - && let StreamingEditFileToolPartialInput { - path: Some(path), - display_description: Some(display_description), - mode: Some(mode), - .. - } = &parsed - { - match EditSession::new( - &PathBuf::from(path), - display_description, - *mode, - &self, - &event_stream, - cx, - ) - .await - { - Ok(session) => state = Some(session), - Err(e) => { - log::error!("Failed to create edit session: {}", e); - return Err(e); - } - } - } - - if let Some(state) = &mut state { - if let Err(e) = state.process(parsed, &self, &event_stream, cx) { - log::error!("Failed to process edit: {}", e); - return Err(e); - } - } - } - } - _ = event_stream.cancelled_by_user().fuse() => { - return Err(StreamingEditFileToolOutput::error("Edit cancelled by user")); - } - } - } - let full_input = - input - .recv() - .await - .map_err(|e| { - let err = StreamingEditFileToolOutput::error(format!("Failed to receive tool input: {e}")); - log::error!("Failed to receive tool input: {e}"); - err - })?; - - let mut state = if let Some(state) = state { - state - } else { - match EditSession::new( - &full_input.path, - &full_input.display_description, - full_input.mode, - &self, - &event_stream, - cx, - ) + match self + .process_streaming_edits(&mut input, &event_stream, cx) .await - { - Ok(session) => session, - Err(e) => { - log::error!("Failed to create edit session: {}", e); - return Err(e); - } + { + EditSessionResult::Completed(session) => { + self.ensure_buffer_saved(&session.buffer, 
cx).await; + let (new_text, diff) = session.compute_new_text_and_diff(cx).await; + Ok(StreamingEditFileToolOutput::Success { + old_text: session.old_text.clone(), + new_text, + input_path: session.input_path, + diff, + }) } - }; - match state.finalize(full_input, &self, &event_stream, cx).await { - Ok(output) => Ok(output), - Err(e) => { - log::error!("Failed to finalize edit: {}", e); - Err(e) + EditSessionResult::Failed { + error, + session: Some(session), + } => { + self.ensure_buffer_saved(&session.buffer, cx).await; + let (_new_text, diff) = session.compute_new_text_and_diff(cx).await; + Err(StreamingEditFileToolOutput::Error { + error, + input_path: Some(session.input_path), + diff, + }) } + EditSessionResult::Failed { + error, + session: None, + } => Err(StreamingEditFileToolOutput::Error { + error, + input_path: None, + diff: String::new(), + }), } }) } @@ -472,6 +602,7 @@ impl AgentTool for StreamingEditFileTool { pub struct EditSession { abs_path: PathBuf, + input_path: PathBuf, buffer: Entity, old_text: Arc, diff: Entity, @@ -518,23 +649,21 @@ impl EditPipeline { impl EditSession { async fn new( - path: &PathBuf, + path: PathBuf, display_description: &str, mode: StreamingEditFileMode, tool: &StreamingEditFileTool, event_stream: &ToolCallEventStream, cx: &mut AsyncApp, - ) -> Result { - let project_path = cx - .update(|cx| resolve_path(mode, &path, &tool.project, cx)) - .map_err(|e| StreamingEditFileToolOutput::error(e.to_string()))?; + ) -> Result { + let project_path = cx.update(|cx| resolve_path(mode, &path, &tool.project, cx))?; let Some(abs_path) = cx.update(|cx| tool.project.read(cx).absolute_path(&project_path, cx)) else { - return Err(StreamingEditFileToolOutput::error(format!( + return Err(format!( "Worktree at '{}' does not exist", path.to_string_lossy() - ))); + )); }; event_stream.update_fields( @@ -543,13 +672,13 @@ impl EditSession { cx.update(|cx| tool.authorize(&path, &display_description, event_stream, cx)) .await - .map_err(|e| 
StreamingEditFileToolOutput::error(e.to_string()))?; + .map_err(|e| e.to_string())?; let buffer = tool .project .update(cx, |project, cx| project.open_buffer(project_path, cx)) .await - .map_err(|e| StreamingEditFileToolOutput::error(e.to_string()))?; + .map_err(|e| e.to_string())?; ensure_buffer_saved(&buffer, &abs_path, tool, cx)?; @@ -578,6 +707,7 @@ impl EditSession { Ok(Self { abs_path, + input_path: path, buffer, old_text, diff, @@ -594,22 +724,20 @@ impl EditSession { tool: &StreamingEditFileTool, event_stream: &ToolCallEventStream, cx: &mut AsyncApp, - ) -> Result { - let old_text = self.old_text.clone(); - + ) -> Result<(), String> { match input.mode { StreamingEditFileMode::Write => { - let content = input.content.ok_or_else(|| { - StreamingEditFileToolOutput::error("'content' field is required for write mode") - })?; + let content = input + .content + .ok_or_else(|| "'content' field is required for write mode".to_string())?; let events = self.parser.finalize_content(&content); self.process_events(&events, tool, event_stream, cx)?; } StreamingEditFileMode::Edit => { - let edits = input.edits.ok_or_else(|| { - StreamingEditFileToolOutput::error("'edits' field is required for edit mode") - })?; + let edits = input + .edits + .ok_or_else(|| "'edits' field is required for edit mode".to_string())?; let events = self.parser.finalize_edits(&edits); self.process_events(&events, tool, event_stream, cx)?; @@ -625,53 +753,15 @@ impl EditSession { } } } + Ok(()) + } - let format_on_save_enabled = self.buffer.read_with(cx, |buffer, cx| { - let settings = language_settings::LanguageSettings::for_buffer(buffer, cx); - settings.format_on_save != FormatOnSave::Off - }); - - if format_on_save_enabled { - tool.action_log.update(cx, |log, cx| { - log.buffer_edited(self.buffer.clone(), cx); - }); - - let format_task = tool.project.update(cx, |project, cx| { - project.format( - HashSet::from_iter([self.buffer.clone()]), - LspFormatTarget::Buffers, - false, - 
FormatTrigger::Save, - cx, - ) - }); - futures::select! { - result = format_task.fuse() => { result.log_err(); }, - _ = event_stream.cancelled_by_user().fuse() => { - return Err(StreamingEditFileToolOutput::error("Edit cancelled by user")); - } - }; - } - - let save_task = tool.project.update(cx, |project, cx| { - project.save_buffer(self.buffer.clone(), cx) - }); - futures::select! { - result = save_task.fuse() => { result.map_err(|e| StreamingEditFileToolOutput::error(e.to_string()))?; }, - _ = event_stream.cancelled_by_user().fuse() => { - return Err(StreamingEditFileToolOutput::error("Edit cancelled by user")); - } - }; - - tool.action_log.update(cx, |log, cx| { - log.buffer_edited(self.buffer.clone(), cx); - }); - + async fn compute_new_text_and_diff(&self, cx: &mut AsyncApp) -> (String, String) { let new_snapshot = self.buffer.read_with(cx, |buffer, _cx| buffer.snapshot()); let (new_text, unified_diff) = cx .background_spawn({ let new_snapshot = new_snapshot.clone(); - let old_text = old_text.clone(); + let old_text = self.old_text.clone(); async move { let new_text = new_snapshot.text(); let diff = language::unified_diff(&old_text, &new_text); @@ -679,14 +769,7 @@ impl EditSession { } }) .await; - - let output = StreamingEditFileToolOutput::Success { - input_path: input.path, - new_text, - old_text: old_text.clone(), - diff: unified_diff, - }; - Ok(output) + (new_text, unified_diff) } fn process( @@ -695,7 +778,7 @@ impl EditSession { tool: &StreamingEditFileTool, event_stream: &ToolCallEventStream, cx: &mut AsyncApp, - ) -> Result<(), StreamingEditFileToolOutput> { + ) -> Result<(), String> { match &self.mode { StreamingEditFileMode::Write => { if let Some(content) = &partial.content { @@ -719,7 +802,7 @@ impl EditSession { tool: &StreamingEditFileTool, event_stream: &ToolCallEventStream, cx: &mut AsyncApp, - ) -> Result<(), StreamingEditFileToolOutput> { + ) -> Result<(), String> { for event in events { match event { ToolEditEvent::ContentChunk { chunk } 
=> { @@ -760,7 +843,7 @@ impl EditSession { { if let Some(match_range) = matcher.push(chunk, None) { let anchor_range = self.buffer.read_with(cx, |buffer, _cx| { - buffer.anchor_range_between(match_range.clone()) + buffer.anchor_range_outside(match_range.clone()) }); self.diff .update(cx, |diff, cx| diff.reveal_range(anchor_range, cx)); @@ -795,7 +878,7 @@ impl EditSession { let anchor_range = self .buffer - .read_with(cx, |buffer, _cx| buffer.anchor_range_between(range.clone())); + .read_with(cx, |buffer, _cx| buffer.anchor_range_outside(range.clone())); self.diff .update(cx, |diff, cx| diff.reveal_range(anchor_range, cx)); @@ -953,7 +1036,7 @@ fn apply_char_operations( } CharOperation::Delete { bytes } => { let delete_end = *edit_cursor + bytes; - let anchor_range = snapshot.anchor_range_around(*edit_cursor..delete_end); + let anchor_range = snapshot.anchor_range_inside(*edit_cursor..delete_end); agent_edit_buffer(&buffer, [(anchor_range, "")], action_log, cx); *edit_cursor = delete_end; } @@ -969,14 +1052,14 @@ fn extract_match( buffer: &Entity, edit_index: &usize, cx: &mut AsyncApp, -) -> Result, StreamingEditFileToolOutput> { +) -> Result, String> { match matches.len() { - 0 => Err(StreamingEditFileToolOutput::error(format!( + 0 => Err(format!( "Could not find matching text for edit at index {}. \ The old_text did not match any content in the file. \ Please read the file again to get the current content.", edit_index, - ))), + )), 1 => Ok(matches.into_iter().next().unwrap()), _ => { let snapshot = buffer.read_with(cx, |buffer, _cx| buffer.snapshot()); @@ -985,12 +1068,12 @@ fn extract_match( .map(|r| (snapshot.offset_to_point(r.start).row + 1).to_string()) .collect::>() .join(", "); - Err(StreamingEditFileToolOutput::error(format!( + Err(format!( "Edit {} matched multiple locations in the file at lines: {}. 
\ Please provide more context in old_text to uniquely \ identify the location.", edit_index, lines - ))) + )) } } } @@ -1022,7 +1105,7 @@ fn ensure_buffer_saved( abs_path: &PathBuf, tool: &StreamingEditFileTool, cx: &mut AsyncApp, -) -> Result<(), StreamingEditFileToolOutput> { +) -> Result<(), String> { let last_read_mtime = tool .action_log .read_with(cx, |log, _| log.file_read_time(abs_path)); @@ -1063,15 +1146,14 @@ fn ensure_buffer_saved( then ask them to save or revert the file manually and inform you when it's ok to proceed." } }; - return Err(StreamingEditFileToolOutput::error(message)); + return Err(message.to_string()); } if let (Some(last_read), Some(current)) = (last_read_mtime, current_mtime) { if current != last_read { - return Err(StreamingEditFileToolOutput::error( - "The file has been modified since you last read it. \ - Please read the file again to get the current state before editing it.", - )); + return Err("The file has been modified since you last read it. \ + Please read the file again to get the current state before editing it." 
+ .to_string()); } } @@ -1083,56 +1165,63 @@ fn resolve_path( path: &PathBuf, project: &Entity, cx: &mut App, -) -> Result { +) -> Result { let project = project.read(cx); match mode { StreamingEditFileMode::Edit => { let path = project .find_project_path(&path, cx) - .context("Can't edit file: path not found")?; + .ok_or_else(|| "Can't edit file: path not found".to_string())?; let entry = project .entry_for_path(&path, cx) - .context("Can't edit file: path not found")?; + .ok_or_else(|| "Can't edit file: path not found".to_string())?; - anyhow::ensure!(entry.is_file(), "Can't edit file: path is a directory"); - Ok(path) + if entry.is_file() { + Ok(path) + } else { + Err("Can't edit file: path is a directory".to_string()) + } } StreamingEditFileMode::Write => { if let Some(path) = project.find_project_path(&path, cx) && let Some(entry) = project.entry_for_path(&path, cx) { - anyhow::ensure!(entry.is_file(), "Can't write to file: path is a directory"); - return Ok(path); + if entry.is_file() { + return Ok(path); + } else { + return Err("Can't write to file: path is a directory".to_string()); + } } - let parent_path = path.parent().context("Can't create file: incorrect path")?; + let parent_path = path + .parent() + .ok_or_else(|| "Can't create file: incorrect path".to_string())?; let parent_project_path = project.find_project_path(&parent_path, cx); let parent_entry = parent_project_path .as_ref() .and_then(|path| project.entry_for_path(path, cx)) - .context("Can't create file: parent directory doesn't exist")?; + .ok_or_else(|| "Can't create file: parent directory doesn't exist")?; - anyhow::ensure!( - parent_entry.is_dir(), - "Can't create file: parent is not a directory" - ); + if !parent_entry.is_dir() { + return Err("Can't create file: parent is not a directory".to_string()); + } let file_name = path .file_name() .and_then(|file_name| file_name.to_str()) .and_then(|file_name| RelPath::unix(file_name).ok()) - .context("Can't create file: invalid filename")?; + 
.ok_or_else(|| "Can't create file: invalid filename".to_string())?; let new_file_path = parent_project_path.map(|parent| ProjectPath { path: parent.path.join(file_name), ..parent }); - new_file_path.context("Can't create file") + new_file_path.ok_or_else(|| "Can't create file".to_string()) } } } @@ -1382,10 +1471,17 @@ mod tests { }) .await; - let StreamingEditFileToolOutput::Error { error } = result.unwrap_err() else { + let StreamingEditFileToolOutput::Error { + error, + diff, + input_path, + } = result.unwrap_err() + else { panic!("expected error"); }; assert_eq!(error, "Can't edit file: path not found"); + assert!(diff.is_empty()); + assert_eq!(input_path, None); } #[gpui::test] @@ -1411,7 +1507,7 @@ mod tests { }) .await; - let StreamingEditFileToolOutput::Error { error } = result.unwrap_err() else { + let StreamingEditFileToolOutput::Error { error, .. } = result.unwrap_err() else { panic!("expected error"); }; assert!( @@ -1424,7 +1520,7 @@ mod tests { async fn test_streaming_early_buffer_open(cx: &mut TestAppContext) { let (tool, _project, _action_log, _fs, _thread) = setup_test(cx, json!({"file.txt": "line 1\nline 2\nline 3\n"})).await; - let (sender, input) = ToolInput::::test(); + let (mut sender, input) = ToolInput::::test(); let (event_stream, _receiver) = ToolCallEventStream::test(); let task = cx.update(|cx| tool.clone().run(input, event_stream, cx)); @@ -1447,7 +1543,7 @@ mod tests { cx.run_until_parked(); // Now send the final complete input - sender.send_final(json!({ + sender.send_full(json!({ "display_description": "Edit lines", "path": "root/file.txt", "mode": "edit", @@ -1465,7 +1561,7 @@ mod tests { async fn test_streaming_path_completeness_heuristic(cx: &mut TestAppContext) { let (tool, _project, _action_log, _fs, _thread) = setup_test(cx, json!({"file.txt": "hello world"})).await; - let (sender, input) = ToolInput::::test(); + let (mut sender, input) = ToolInput::::test(); let (event_stream, _receiver) = ToolCallEventStream::test(); let task 
= cx.update(|cx| tool.clone().run(input, event_stream, cx)); @@ -1485,7 +1581,7 @@ mod tests { cx.run_until_parked(); // Send final - sender.send_final(json!({ + sender.send_full(json!({ "display_description": "Overwrite file", "path": "root/file.txt", "mode": "write", @@ -1503,7 +1599,7 @@ mod tests { async fn test_streaming_cancellation_during_partials(cx: &mut TestAppContext) { let (tool, _project, _action_log, _fs, _thread) = setup_test(cx, json!({"file.txt": "hello world"})).await; - let (sender, input) = ToolInput::::test(); + let (mut sender, input) = ToolInput::::test(); let (event_stream, _receiver, mut cancellation_tx) = ToolCallEventStream::test_with_cancellation(); let task = cx.update(|cx| tool.clone().run(input, event_stream, cx)); @@ -1521,7 +1617,7 @@ mod tests { drop(sender); let result = task.await; - let StreamingEditFileToolOutput::Error { error } = result.unwrap_err() else { + let StreamingEditFileToolOutput::Error { error, .. } = result.unwrap_err() else { panic!("expected error"); }; assert!( @@ -1537,7 +1633,7 @@ mod tests { json!({"file.txt": "line 1\nline 2\nline 3\nline 4\nline 5\n"}), ) .await; - let (sender, input) = ToolInput::::test(); + let (mut sender, input) = ToolInput::::test(); let (event_stream, _receiver) = ToolCallEventStream::test(); let task = cx.update(|cx| tool.clone().run(input, event_stream, cx)); @@ -1578,7 +1674,7 @@ mod tests { cx.run_until_parked(); // Send final complete input - sender.send_final(json!({ + sender.send_full(json!({ "display_description": "Edit multiple lines", "path": "root/file.txt", "mode": "edit", @@ -1601,7 +1697,7 @@ mod tests { #[gpui::test] async fn test_streaming_create_file_with_partials(cx: &mut TestAppContext) { let (tool, _project, _action_log, _fs, _thread) = setup_test(cx, json!({"dir": {}})).await; - let (sender, input) = ToolInput::::test(); + let (mut sender, input) = ToolInput::::test(); let (event_stream, _receiver) = ToolCallEventStream::test(); let task = cx.update(|cx| 
tool.clone().run(input, event_stream, cx)); @@ -1625,7 +1721,7 @@ mod tests { cx.run_until_parked(); // Final with full content - sender.send_final(json!({ + sender.send_full(json!({ "display_description": "Create new file", "path": "root/dir/new_file.txt", "mode": "write", @@ -1643,12 +1739,12 @@ mod tests { async fn test_streaming_no_partials_direct_final(cx: &mut TestAppContext) { let (tool, _project, _action_log, _fs, _thread) = setup_test(cx, json!({"file.txt": "line 1\nline 2\nline 3\n"})).await; - let (sender, input) = ToolInput::::test(); + let (mut sender, input) = ToolInput::::test(); let (event_stream, _receiver) = ToolCallEventStream::test(); let task = cx.update(|cx| tool.clone().run(input, event_stream, cx)); // Send final immediately with no partials (simulates non-streaming path) - sender.send_final(json!({ + sender.send_full(json!({ "display_description": "Edit lines", "path": "root/file.txt", "mode": "edit", @@ -1669,7 +1765,7 @@ mod tests { json!({"file.txt": "line 1\nline 2\nline 3\nline 4\nline 5\n"}), ) .await; - let (sender, input) = ToolInput::::test(); + let (mut sender, input) = ToolInput::::test(); let (event_stream, _receiver) = ToolCallEventStream::test(); let task = cx.update(|cx| tool.clone().run(input, event_stream, cx)); @@ -1739,7 +1835,7 @@ mod tests { ); // Send final complete input - sender.send_final(json!({ + sender.send_full(json!({ "display_description": "Edit multiple lines", "path": "root/file.txt", "mode": "edit", @@ -1767,7 +1863,7 @@ mod tests { async fn test_streaming_incremental_three_edits(cx: &mut TestAppContext) { let (tool, project, _action_log, _fs, _thread) = setup_test(cx, json!({"file.txt": "aaa\nbbb\nccc\nddd\neee\n"})).await; - let (sender, input) = ToolInput::::test(); + let (mut sender, input) = ToolInput::::test(); let (event_stream, _receiver) = ToolCallEventStream::test(); let task = cx.update(|cx| tool.clone().run(input, event_stream, cx)); @@ -1835,7 +1931,7 @@ mod tests { 
assert_eq!(buffer_text.as_deref(), Some("AAA\nbbb\nCCC\nddd\nEEEeee\n")); // Send final - sender.send_final(json!({ + sender.send_full(json!({ "display_description": "Edit three lines", "path": "root/file.txt", "mode": "edit", @@ -1857,7 +1953,7 @@ mod tests { async fn test_streaming_edit_failure_mid_stream(cx: &mut TestAppContext) { let (tool, project, _action_log, _fs, _thread) = setup_test(cx, json!({"file.txt": "line 1\nline 2\nline 3\n"})).await; - let (sender, input) = ToolInput::::test(); + let (mut sender, input) = ToolInput::::test(); let (event_stream, _receiver) = ToolCallEventStream::test(); let task = cx.update(|cx| tool.clone().run(input, event_stream, cx)); @@ -1893,16 +1989,17 @@ mod tests { })); cx.run_until_parked(); - // Verify edit 1 was applied - let buffer_text = project.update(cx, |project, cx| { + let buffer = project.update(cx, |project, cx| { let pp = project .find_project_path(&PathBuf::from("root/file.txt"), cx) .unwrap(); - project.get_open_buffer(&pp, cx).map(|b| b.read(cx).text()) + project.get_open_buffer(&pp, cx).unwrap() }); + + // Verify edit 1 was applied + let buffer_text = buffer.read_with(cx, |buffer, _cx| buffer.text()); assert_eq!( - buffer_text.as_deref(), - Some("MODIFIED\nline 2\nline 3\n"), + buffer_text, "MODIFIED\nline 2\nline 3\n", "First edit should be applied even though second edit will fail" ); @@ -1925,20 +2022,32 @@ mod tests { drop(sender); let result = task.await; - let StreamingEditFileToolOutput::Error { error } = result.unwrap_err() else { + let StreamingEditFileToolOutput::Error { + error, + diff, + input_path, + } = result.unwrap_err() + else { panic!("expected error"); }; + assert!( error.contains("Could not find matching text for edit at index 1"), "Expected error about edit 1 failing, got: {error}" ); + // Ensure that first edit was applied successfully and that we saved the buffer + assert_eq!(input_path, Some(PathBuf::from("root/file.txt"))); + assert_eq!( + diff, + "@@ -1,3 +1,3 @@\n-line 
1\n+MODIFIED\n line 2\n line 3\n" + ); } #[gpui::test] async fn test_streaming_single_edit_no_incremental(cx: &mut TestAppContext) { let (tool, project, _action_log, _fs, _thread) = setup_test(cx, json!({"file.txt": "hello world\n"})).await; - let (sender, input) = ToolInput::::test(); + let (mut sender, input) = ToolInput::::test(); let (event_stream, _receiver) = ToolCallEventStream::test(); let task = cx.update(|cx| tool.clone().run(input, event_stream, cx)); @@ -1975,7 +2084,7 @@ mod tests { ); // Send final — the edit is applied during finalization - sender.send_final(json!({ + sender.send_full(json!({ "display_description": "Single edit", "path": "root/file.txt", "mode": "edit", @@ -1993,7 +2102,7 @@ mod tests { async fn test_streaming_input_partials_then_final(cx: &mut TestAppContext) { let (tool, _project, _action_log, _fs, _thread) = setup_test(cx, json!({"file.txt": "line 1\nline 2\nline 3\n"})).await; - let (sender, input): (ToolInputSender, ToolInput) = + let (mut sender, input): (ToolInputSender, ToolInput) = ToolInput::test(); let (event_stream, _event_rx) = ToolCallEventStream::test(); let task = cx.update(|cx| tool.clone().run(input, event_stream, cx)); @@ -2020,7 +2129,7 @@ mod tests { cx.run_until_parked(); // Send the final complete input - sender.send_final(json!({ + sender.send_full(json!({ "display_description": "Edit lines", "path": "root/file.txt", "mode": "edit", @@ -2038,7 +2147,7 @@ mod tests { async fn test_streaming_input_sender_dropped_before_final(cx: &mut TestAppContext) { let (tool, _project, _action_log, _fs, _thread) = setup_test(cx, json!({"file.txt": "hello world\n"})).await; - let (sender, input): (ToolInputSender, ToolInput) = + let (mut sender, input): (ToolInputSender, ToolInput) = ToolInput::test(); let (event_stream, _event_rx) = ToolCallEventStream::test(); let task = cx.update(|cx| tool.clone().run(input, event_stream, cx)); @@ -2064,7 +2173,7 @@ mod tests { // Create a channel and send multiple partials before a final, 
then use // ToolInput::resolved-style immediate delivery to confirm recv() works // when partials are already buffered. - let (sender, input): (ToolInputSender, ToolInput) = + let (mut sender, input): (ToolInputSender, ToolInput) = ToolInput::test(); let (event_stream, _event_rx) = ToolCallEventStream::test(); let task = cx.update(|cx| tool.clone().run(input, event_stream, cx)); @@ -2077,7 +2186,7 @@ mod tests { "path": "root/dir/new.txt", "mode": "write" })); - sender.send_final(json!({ + sender.send_full(json!({ "display_description": "Create", "path": "root/dir/new.txt", "mode": "write", @@ -2109,13 +2218,13 @@ mod tests { let result = test_resolve_path(&mode, "root/dir/subdir", cx); assert_eq!( - result.await.unwrap_err().to_string(), + result.await.unwrap_err(), "Can't write to file: path is a directory" ); let result = test_resolve_path(&mode, "root/dir/nonexistent_dir/new.txt", cx); assert_eq!( - result.await.unwrap_err().to_string(), + result.await.unwrap_err(), "Can't create file: parent directory doesn't exist" ); } @@ -2133,14 +2242,11 @@ mod tests { assert_resolved_path_eq(result.await, rel_path(path_without_root)); let result = test_resolve_path(&mode, "root/nonexistent.txt", cx); - assert_eq!( - result.await.unwrap_err().to_string(), - "Can't edit file: path not found" - ); + assert_eq!(result.await.unwrap_err(), "Can't edit file: path not found"); let result = test_resolve_path(&mode, "root/dir", cx); assert_eq!( - result.await.unwrap_err().to_string(), + result.await.unwrap_err(), "Can't edit file: path is a directory" ); } @@ -2149,7 +2255,7 @@ mod tests { mode: &StreamingEditFileMode, path: &str, cx: &mut TestAppContext, - ) -> anyhow::Result { + ) -> Result { init_test(cx); let fs = project::FakeFs::new(cx.executor()); @@ -2170,7 +2276,7 @@ mod tests { } #[track_caller] - fn assert_resolved_path_eq(path: anyhow::Result, expected: &RelPath) { + fn assert_resolved_path_eq(path: Result, expected: &RelPath) { let actual = path.expect("Should return 
valid path").path; assert_eq!(actual.as_ref(), expected); } @@ -2259,7 +2365,7 @@ mod tests { }); // Use streaming pattern so executor can pump the LSP request/response - let (sender, input) = ToolInput::::test(); + let (mut sender, input) = ToolInput::::test(); let (event_stream, _receiver) = ToolCallEventStream::test(); let task = cx.update(|cx| tool.clone().run(input, event_stream, cx)); @@ -2271,7 +2377,7 @@ mod tests { })); cx.run_until_parked(); - sender.send_final(json!({ + sender.send_full(json!({ "display_description": "Create main function", "path": "root/src/main.rs", "mode": "write", @@ -2310,7 +2416,7 @@ mod tests { }); }); - let (sender, input) = ToolInput::::test(); + let (mut sender, input) = ToolInput::::test(); let (event_stream, _receiver) = ToolCallEventStream::test(); let tool2 = Arc::new(StreamingEditFileTool::new( @@ -2329,7 +2435,7 @@ mod tests { })); cx.run_until_parked(); - sender.send_final(json!({ + sender.send_full(json!({ "display_description": "Update main function", "path": "root/src/main.rs", "mode": "write", @@ -2493,7 +2599,7 @@ mod tests { }) .await .unwrap(); - assert!(stream_rx.try_next().is_err()); + assert!(stream_rx.try_recv().is_err()); // Test 4: Path with .zed in the middle should require confirmation let (stream_tx, mut stream_rx) = ToolCallEventStream::test(); @@ -2540,7 +2646,7 @@ mod tests { cx.update(|cx| tool.authorize(&PathBuf::from("/etc/hosts"), "test 5.2", &stream_tx, cx)) .await .unwrap(); - assert!(stream_rx.try_next().is_err()); + assert!(stream_rx.try_recv().is_err()); // 5.3: Normal in-project path with allow — no confirmation needed let (stream_tx, mut stream_rx) = ToolCallEventStream::test(); @@ -2554,7 +2660,7 @@ mod tests { }) .await .unwrap(); - assert!(stream_rx.try_next().is_err()); + assert!(stream_rx.try_recv().is_err()); // 5.4: With Confirm default, non-project paths still prompt cx.update(|cx| { @@ -2767,8 +2873,8 @@ mod tests { assert!(result.is_err(), "Tool should fail when policy denies"); 
assert!( !matches!( - stream_rx.try_next(), - Ok(Some(Ok(crate::ThreadEvent::ToolCallAuthorization(_)))) + stream_rx.try_recv(), + Ok(Ok(crate::ThreadEvent::ToolCallAuthorization(_))) ), "Deny policy should not emit symlink authorization prompt", ); @@ -2810,7 +2916,7 @@ mod tests { } else { auth.await.unwrap(); assert!( - stream_rx.try_next().is_err(), + stream_rx.try_recv().is_err(), "Failed for case: {} - path: {} - expected no confirmation but got one", description, path @@ -2887,7 +2993,7 @@ mod tests { } else { auth.await.unwrap(); assert!( - stream_rx.try_next().is_err(), + stream_rx.try_recv().is_err(), "Failed for case: {} - path: {} - expected no confirmation but got one", description, path @@ -2947,7 +3053,7 @@ mod tests { stream_rx.expect_authorization().await; } else { assert!( - stream_rx.try_next().is_err(), + stream_rx.try_recv().is_err(), "Failed for case: {} - path: {} - expected no confirmation but got one", description, path @@ -3015,7 +3121,7 @@ mod tests { }) .await .unwrap(); - assert!(stream_rx.try_next().is_err()); + assert!(stream_rx.try_recv().is_err()); } } @@ -3288,14 +3394,22 @@ mod tests { }) .await; - let StreamingEditFileToolOutput::Error { error } = result.unwrap_err() else { + let StreamingEditFileToolOutput::Error { + error, + diff, + input_path, + } = result.unwrap_err() + else { panic!("expected error"); }; + assert!( error.contains("has been modified since you last read it"), "Error should mention file modification, got: {}", error ); + assert!(diff.is_empty()); + assert!(input_path.is_none()); } #[gpui::test] @@ -3362,7 +3476,12 @@ mod tests { }) .await; - let StreamingEditFileToolOutput::Error { error } = result.unwrap_err() else { + let StreamingEditFileToolOutput::Error { + error, + diff, + input_path, + } = result.unwrap_err() + else { panic!("expected error"); }; assert!( @@ -3380,6 +3499,8 @@ mod tests { "Error should ask user to manually save or revert when tools aren't available, got: {}", error ); + 
assert!(diff.is_empty()); + assert!(input_path.is_none()); } #[gpui::test] @@ -3390,7 +3511,7 @@ mod tests { // the modified buffer and succeeds. let (tool, _project, _action_log, _fs, _thread) = setup_test(cx, json!({"file.txt": "aaa\nbbb\nccc\nddd\neee\n"})).await; - let (sender, input) = ToolInput::::test(); + let (mut sender, input) = ToolInput::::test(); let (event_stream, _receiver) = ToolCallEventStream::test(); let task = cx.update(|cx| tool.clone().run(input, event_stream, cx)); @@ -3420,7 +3541,7 @@ mod tests { cx.run_until_parked(); // Send the final input with all three edits. - sender.send_final(json!({ + sender.send_full(json!({ "display_description": "Overlapping edits", "path": "root/file.txt", "mode": "edit", @@ -3441,7 +3562,7 @@ mod tests { #[gpui::test] async fn test_streaming_create_content_streamed(cx: &mut TestAppContext) { let (tool, project, _action_log, _fs, _thread) = setup_test(cx, json!({"dir": {}})).await; - let (sender, input) = ToolInput::::test(); + let (mut sender, input) = ToolInput::::test(); let (event_stream, _receiver) = ToolCallEventStream::test(); let task = cx.update(|cx| tool.clone().run(input, event_stream, cx)); @@ -3495,7 +3616,7 @@ mod tests { ); // Send final input - sender.send_final(json!({ + sender.send_full(json!({ "display_description": "Create new file", "path": "root/dir/new_file.txt", "mode": "write", @@ -3516,7 +3637,7 @@ mod tests { json!({"file.txt": "old line 1\nold line 2\nold line 3\n"}), ) .await; - let (sender, input) = ToolInput::::test(); + let (mut sender, input) = ToolInput::::test(); let (event_stream, mut receiver) = ToolCallEventStream::test(); let task = cx.update(|cx| tool.clone().run(input, event_stream, cx)); @@ -3559,7 +3680,7 @@ mod tests { }); // Send final input - sender.send_final(json!({ + sender.send_full(json!({ "display_description": "Overwrite file", "path": "root/file.txt", "mode": "write", @@ -3587,7 +3708,7 @@ mod tests { json!({"file.txt": "old line 1\nold line 2\nold line 
3\n"}), ) .await; - let (sender, input) = ToolInput::::test(); + let (mut sender, input) = ToolInput::::test(); let (event_stream, _receiver) = ToolCallEventStream::test(); let task = cx.update(|cx| tool.clone().run(input, event_stream, cx)); @@ -3634,7 +3755,7 @@ mod tests { ); // Send final input with complete content - sender.send_final(json!({ + sender.send_full(json!({ "display_description": "Overwrite file", "path": "root/file.txt", "mode": "write", @@ -3656,7 +3777,7 @@ mod tests { async fn test_streaming_edit_json_fixer_escape_corruption(cx: &mut TestAppContext) { let (tool, _project, _action_log, _fs, _thread) = setup_test(cx, json!({"file.txt": "hello\nworld\nfoo\n"})).await; - let (sender, input) = ToolInput::::test(); + let (mut sender, input) = ToolInput::::test(); let (event_stream, _receiver) = ToolCallEventStream::test(); let task = cx.update(|cx| tool.clone().run(input, event_stream, cx)); @@ -3690,7 +3811,7 @@ mod tests { cx.run_until_parked(); // Send final. - sender.send_final(json!({ + sender.send_full(json!({ "display_description": "Edit", "path": "root/file.txt", "mode": "edit", @@ -3708,7 +3829,7 @@ mod tests { async fn test_streaming_final_input_stringified_edits_succeeds(cx: &mut TestAppContext) { let (tool, _project, _action_log, _fs, _thread) = setup_test(cx, json!({"file.txt": "hello\nworld\n"})).await; - let (sender, input) = ToolInput::::test(); + let (mut sender, input) = ToolInput::::test(); let (event_stream, _receiver) = ToolCallEventStream::test(); let task = cx.update(|cx| tool.clone().run(input, event_stream, cx)); @@ -3719,7 +3840,7 @@ mod tests { })); cx.run_until_parked(); - sender.send_final(json!({ + sender.send_full(json!({ "display_description": "Edit", "path": "root/file.txt", "mode": "edit", @@ -3823,7 +3944,7 @@ mod tests { ) { let (tool, _project, _action_log, _fs, _thread) = setup_test(cx, json!({"file.txt": "old_content"})).await; - let (sender, input) = ToolInput::::test(); + let (mut sender, input) = 
ToolInput::::test(); let (event_stream, _receiver) = ToolCallEventStream::test(); let task = cx.update(|cx| tool.clone().run(input, event_stream, cx)); @@ -3849,7 +3970,7 @@ mod tests { cx.run_until_parked(); // Send final. - sender.send_final(json!({ + sender.send_full(json!({ "display_description": "Overwrite file", "mode": "write", "content": "new_content", @@ -3869,7 +3990,7 @@ mod tests { ) { let (tool, _project, _action_log, _fs, _thread) = setup_test(cx, json!({"file.txt": "old_content"})).await; - let (sender, input) = ToolInput::::test(); + let (mut sender, input) = ToolInput::::test(); let (event_stream, _receiver) = ToolCallEventStream::test(); let task = cx.update(|cx| tool.clone().run(input, event_stream, cx)); @@ -3902,7 +4023,7 @@ mod tests { cx.run_until_parked(); // Send final. - sender.send_final(json!({ + sender.send_full(json!({ "display_description": "Overwrite file", "mode": "edit", "edits": [{"old_text": "old_content", "new_text": "new_content"}], @@ -3939,11 +4060,11 @@ mod tests { let old_text = "}\n\n\n\nfn render_search"; let new_text = "}\n\nfn render_search"; - let (sender, input) = ToolInput::::test(); + let (mut sender, input) = ToolInput::::test(); let (event_stream, _receiver) = ToolCallEventStream::test(); let task = cx.update(|cx| tool.clone().run(input, event_stream, cx)); - sender.send_final(json!({ + sender.send_full(json!({ "display_description": "Remove extra blank lines", "path": "root/file.rs", "mode": "edit", @@ -3980,11 +4101,11 @@ mod tests { let (tool, _project, _action_log, _fs, _thread) = setup_test(cx, json!({"file.rs": file_content})).await; - let (sender, input) = ToolInput::::test(); + let (mut sender, input) = ToolInput::::test(); let (event_stream, _receiver) = ToolCallEventStream::test(); let task = cx.update(|cx| tool.clone().run(input, event_stream, cx)); - sender.send_final(json!({ + sender.send_full(json!({ "display_description": "description", "path": "root/file.rs", "mode": "edit", diff --git 
a/crates/agent/src/tools/terminal_tool.rs b/crates/agent/src/tools/terminal_tool.rs index 82bf9a06480bb7d6db3611516281f42452ec5137..f36bd0fe3d3fb00931a7dc272d76eb042f6570f6 100644 --- a/crates/agent/src/tools/terminal_tool.rs +++ b/crates/agent/src/tools/terminal_tool.rs @@ -681,17 +681,17 @@ mod tests { ); assert!( !matches!( - rx.try_next(), - Ok(Some(Ok(crate::ThreadEvent::ToolCallAuthorization(_)))) + rx.try_recv(), + Ok(Ok(crate::ThreadEvent::ToolCallAuthorization(_))) ), "invalid command should not request authorization" ); assert!( !matches!( - rx.try_next(), - Ok(Some(Ok(crate::ThreadEvent::ToolCallUpdate( + rx.try_recv(), + Ok(Ok(crate::ThreadEvent::ToolCallUpdate( acp_thread::ToolCallUpdate::UpdateFields(_) - )))) + ))) ), "invalid command should not emit a terminal card update" ); @@ -810,8 +810,8 @@ mod tests { ); assert!( !matches!( - rx.try_next(), - Ok(Some(Ok(crate::ThreadEvent::ToolCallAuthorization(_)))) + rx.try_recv(), + Ok(Ok(crate::ThreadEvent::ToolCallAuthorization(_))) ), "hardcoded denial should not request authorization" ); @@ -1058,8 +1058,8 @@ mod tests { ); assert!( !matches!( - rx.try_next(), - Ok(Some(Ok(crate::ThreadEvent::ToolCallAuthorization(_)))) + rx.try_recv(), + Ok(Ok(crate::ThreadEvent::ToolCallAuthorization(_))) ), "rejected command {command:?} should not request authorization" ); diff --git a/crates/agent_servers/Cargo.toml b/crates/agent_servers/Cargo.toml index 1542466be35bbce80983a73a3fc2e0998799160c..7151f0084b1cb7d9b206f57551ce715ef67483f7 100644 --- a/crates/agent_servers/Cargo.toml +++ b/crates/agent_servers/Cargo.toml @@ -32,7 +32,6 @@ futures.workspace = true gpui.workspace = true feature_flags.workspace = true gpui_tokio = { workspace = true, optional = true } -credentials_provider.workspace = true google_ai.workspace = true http_client.workspace = true indoc.workspace = true @@ -53,6 +52,7 @@ terminal.workspace = true uuid.workspace = true util.workspace = true watch.workspace = true 
+zed_credentials_provider.workspace = true [target.'cfg(unix)'.dependencies] libc.workspace = true diff --git a/crates/agent_servers/src/custom.rs b/crates/agent_servers/src/custom.rs index 0dcd2240d6ecf6dc052cdd55953cff8ec1442eae..fb8d0a515244576d2cf02e4989cbd71beca448c7 100644 --- a/crates/agent_servers/src/custom.rs +++ b/crates/agent_servers/src/custom.rs @@ -3,7 +3,6 @@ use acp_thread::AgentConnection; use agent_client_protocol as acp; use anyhow::{Context as _, Result}; use collections::HashSet; -use credentials_provider::CredentialsProvider; use fs::Fs; use gpui::{App, AppContext as _, Entity, Task}; use language_model::{ApiKey, EnvVar}; @@ -392,7 +391,7 @@ fn api_key_for_gemini_cli(cx: &mut App) -> Task> { if let Some(key) = env_var.value { return Task::ready(Ok(key)); } - let credentials_provider = ::global(cx); + let credentials_provider = zed_credentials_provider::global(cx); let api_url = google_ai::API_URL.to_string(); cx.spawn(async move |cx| { Ok( diff --git a/crates/agent_servers/src/e2e_tests.rs b/crates/agent_servers/src/e2e_tests.rs index 956d106df2a260bd2eb31c14f4f1f1705bf74cd6..aa29a0c230c13949b15f2b39a245ae41ead4884d 100644 --- a/crates/agent_servers/src/e2e_tests.rs +++ b/crates/agent_servers/src/e2e_tests.rs @@ -1,6 +1,7 @@ use crate::{AgentServer, AgentServerDelegate}; use acp_thread::{AcpThread, AgentThreadEntry, ToolCall, ToolCallStatus}; use agent_client_protocol as acp; +use client::RefreshLlmTokenListener; use futures::{FutureExt, StreamExt, channel::mpsc, select}; use gpui::AppContext; use gpui::{Entity, TestAppContext}; @@ -413,7 +414,8 @@ pub async fn init_test(cx: &mut TestAppContext) -> Arc { cx.set_http_client(Arc::new(http_client)); let client = client::Client::production(cx); let user_store = cx.new(|cx| client::UserStore::new(client.clone(), cx)); - language_model::init(user_store, client, cx); + language_model::init(cx); + RefreshLlmTokenListener::register(client.clone(), user_store, cx); #[cfg(test)] 
project::agent_server_store::AllAgentServersSettings::override_global( diff --git a/crates/agent_settings/src/agent_settings.rs b/crates/agent_settings/src/agent_settings.rs index 2ef65fe33641cdeca1a77642251523275511e81f..0c68d2f25d54f966d1cc0a93476457bbba79c959 100644 --- a/crates/agent_settings/src/agent_settings.rs +++ b/crates/agent_settings/src/agent_settings.rs @@ -13,8 +13,8 @@ use schemars::JsonSchema; use serde::{Deserialize, Serialize}; use settings::{ DockPosition, DockSide, LanguageModelParameters, LanguageModelSelection, NewThreadLocation, - NotifyWhenAgentWaiting, RegisterSetting, Settings, SettingsContent, SettingsStore, - SidebarDockPosition, SidebarSide, ThinkingBlockDisplay, ToolPermissionMode, + NotifyWhenAgentWaiting, PlaySoundWhenAgentDone, RegisterSetting, Settings, SettingsContent, + SettingsStore, SidebarDockPosition, SidebarSide, ThinkingBlockDisplay, ToolPermissionMode, update_settings_file, }; @@ -165,7 +165,7 @@ pub struct AgentSettings { pub profiles: IndexMap, pub notify_when_agent_waiting: NotifyWhenAgentWaiting, - pub play_sound_when_agent_done: bool, + pub play_sound_when_agent_done: PlaySoundWhenAgentDone, pub single_file_review: bool, pub model_parameters: Vec, pub enable_feedback: bool, @@ -176,6 +176,7 @@ pub struct AgentSettings { pub use_modifier_to_send: bool, pub message_editor_min_lines: usize, pub show_turn_stats: bool, + pub show_merge_conflict_indicator: bool, pub tool_permissions: ToolPermissions, pub new_thread_location: NewThreadLocation, } @@ -618,7 +619,7 @@ impl Settings for AgentSettings { .collect(), notify_when_agent_waiting: agent.notify_when_agent_waiting.unwrap(), - play_sound_when_agent_done: agent.play_sound_when_agent_done.unwrap(), + play_sound_when_agent_done: agent.play_sound_when_agent_done.unwrap_or_default(), single_file_review: agent.single_file_review.unwrap(), model_parameters: agent.model_parameters, enable_feedback: agent.enable_feedback.unwrap(), @@ -629,6 +630,7 @@ impl Settings for 
AgentSettings { use_modifier_to_send: agent.use_modifier_to_send.unwrap(), message_editor_min_lines: agent.message_editor_min_lines.unwrap(), show_turn_stats: agent.show_turn_stats.unwrap(), + show_merge_conflict_indicator: agent.show_merge_conflict_indicator.unwrap(), tool_permissions: compile_tool_permissions(agent.tool_permissions), new_thread_location: agent.new_thread_location.unwrap_or_default(), } diff --git a/crates/agent_ui/src/agent_configuration/add_llm_provider_modal.rs b/crates/agent_ui/src/agent_configuration/add_llm_provider_modal.rs index 4e3dd63b0337f9be54b550f4f4a6a5ca2e7cdd42..e0df79ba4dfe226652818b120b7bfcc493c73b1e 100644 --- a/crates/agent_ui/src/agent_configuration/add_llm_provider_modal.rs +++ b/crates/agent_ui/src/agent_configuration/add_llm_provider_modal.rs @@ -202,6 +202,7 @@ impl ModelInput { .text(cx) .parse::() .map_err(|_| SharedString::from("Max Tokens must be a number"))?, + reasoning_effort: None, capabilities: ModelCapabilities { tools: self.capabilities.supports_tools.selected(), images: self.capabilities.supports_images.selected(), @@ -815,7 +816,7 @@ mod tests { cx.set_global(store); theme_settings::init(theme::LoadThemes::JustBase, cx); - language_model::init_settings(cx); + language_model::init(cx); editor::init(cx); }); diff --git a/crates/agent_ui/src/agent_diff.rs b/crates/agent_ui/src/agent_diff.rs index 2e709c0be3297e270119c048c7b8e25e7958ee69..7b70740dd1ac462614a9d08d9e48d7d13ac2ed32 100644 --- a/crates/agent_ui/src/agent_diff.rs +++ b/crates/agent_ui/src/agent_diff.rs @@ -138,11 +138,12 @@ impl AgentDiffPane { path_a.cmp(&path_b) }); - let mut paths_to_delete = self + let mut buffers_to_delete = self .multibuffer .read(cx) - .paths() - .cloned() + .snapshot(cx) + .excerpts() + .map(|excerpt| excerpt.context.start.buffer_id) .collect::>(); for (buffer, diff_handle) in sorted_buffers { @@ -151,7 +152,7 @@ impl AgentDiffPane { } let path_key = PathKey::for_buffer(&buffer, cx); - paths_to_delete.remove(&path_key); + 
buffers_to_delete.remove(&buffer.read(cx).remote_id()); let snapshot = buffer.read(cx).snapshot(); @@ -168,7 +169,7 @@ impl AgentDiffPane { let (was_empty, is_excerpt_newly_added) = self.multibuffer.update(cx, |multibuffer, cx| { let was_empty = multibuffer.is_empty(); - let (_, is_excerpt_newly_added) = multibuffer.set_excerpts_for_path( + let is_excerpt_newly_added = multibuffer.update_excerpts_for_path( path_key.clone(), buffer.clone(), diff_hunk_ranges, @@ -183,13 +184,13 @@ impl AgentDiffPane { if was_empty { let first_hunk = editor .diff_hunks_in_ranges( - &[editor::Anchor::min()..editor::Anchor::max()], + &[editor::Anchor::Min..editor::Anchor::Max], &self.multibuffer.read(cx).read(cx), ) .next(); if let Some(first_hunk) = first_hunk { - let first_hunk_start = first_hunk.multi_buffer_range().start; + let first_hunk_start = first_hunk.multi_buffer_range.start; editor.change_selections(Default::default(), window, cx, |selections| { selections.select_anchor_ranges([first_hunk_start..first_hunk_start]); }) @@ -208,8 +209,8 @@ impl AgentDiffPane { } self.multibuffer.update(cx, |multibuffer, cx| { - for path in paths_to_delete { - multibuffer.remove_excerpts_for_path(path, cx); + for buffer_id in buffers_to_delete { + multibuffer.remove_excerpts_for_buffer(buffer_id, cx); } }); @@ -239,13 +240,13 @@ impl AgentDiffPane { self.editor.update(cx, |editor, cx| { let first_hunk = editor .diff_hunks_in_ranges( - &[position..editor::Anchor::max()], + &[position..editor::Anchor::Max], &self.multibuffer.read(cx).read(cx), ) .next(); if let Some(first_hunk) = first_hunk { - let first_hunk_start = first_hunk.multi_buffer_range().start; + let first_hunk_start = first_hunk.multi_buffer_range.start; editor.change_selections(Default::default(), window, cx, |selections| { selections.select_anchor_ranges([first_hunk_start..first_hunk_start]); }) @@ -282,7 +283,7 @@ impl AgentDiffPane { editor, &snapshot, &self.thread, - vec![editor::Anchor::min()..editor::Anchor::max()], + 
vec![editor::Anchor::Min..editor::Anchor::Max], self.workspace.clone(), window, cx, @@ -451,20 +452,20 @@ fn update_editor_selection( diff_hunks .last() .and_then(|last_kept_hunk| { - let last_kept_hunk_end = last_kept_hunk.multi_buffer_range().end; + let last_kept_hunk_end = last_kept_hunk.multi_buffer_range.end; editor .diff_hunks_in_ranges( - &[last_kept_hunk_end..editor::Anchor::max()], + &[last_kept_hunk_end..editor::Anchor::Max], buffer_snapshot, ) .nth(1) }) .or_else(|| { let first_kept_hunk = diff_hunks.first()?; - let first_kept_hunk_start = first_kept_hunk.multi_buffer_range().start; + let first_kept_hunk_start = first_kept_hunk.multi_buffer_range.start; editor .diff_hunks_in_ranges( - &[editor::Anchor::min()..first_kept_hunk_start], + &[editor::Anchor::Min..first_kept_hunk_start], buffer_snapshot, ) .next() @@ -473,7 +474,7 @@ fn update_editor_selection( if let Some(target_hunk) = target_hunk { editor.change_selections(Default::default(), window, cx, |selections| { - let next_hunk_start = target_hunk.multi_buffer_range().start; + let next_hunk_start = target_hunk.multi_buffer_range.start; selections.select_anchor_ranges([next_hunk_start..next_hunk_start]); }) } @@ -1567,7 +1568,7 @@ impl AgentDiff { editor.update(cx, |editor, cx| { let snapshot = multibuffer.read(cx).snapshot(cx); if let Some(first_hunk) = snapshot.diff_hunks().next() { - let first_hunk_start = first_hunk.multi_buffer_range().start; + let first_hunk_start = first_hunk.multi_buffer_range.start; editor.change_selections( SelectionEffects::scroll(Autoscroll::center()), @@ -1648,7 +1649,7 @@ impl AgentDiff { editor, &snapshot, thread, - vec![editor::Anchor::min()..editor::Anchor::max()], + vec![editor::Anchor::Min..editor::Anchor::Max], window, cx, ); @@ -1669,7 +1670,7 @@ impl AgentDiff { editor, &snapshot, thread, - vec![editor::Anchor::min()..editor::Anchor::max()], + vec![editor::Anchor::Min..editor::Anchor::Max], workspace.clone(), window, cx, @@ -1808,7 +1809,7 @@ mod tests { 
cx.set_global(settings_store); prompt_store::init(cx); theme_settings::init(theme::LoadThemes::JustBase, cx); - language_model::init_settings(cx); + language_model::init(cx); }); let fs = FakeFs::new(cx.executor()); @@ -1965,7 +1966,7 @@ mod tests { cx.set_global(settings_store); prompt_store::init(cx); theme_settings::init(theme::LoadThemes::JustBase, cx); - language_model::init_settings(cx); + language_model::init(cx); workspace::register_project_item::(cx); }); diff --git a/crates/agent_ui/src/agent_panel.rs b/crates/agent_ui/src/agent_panel.rs index 0ed0aeb78bf8889136a479ed2dac5caba633db55..41900e71e5d3ad7e5327ee7e04f73cb05eed5a5b 100644 --- a/crates/agent_ui/src/agent_panel.rs +++ b/crates/agent_ui/src/agent_panel.rs @@ -25,6 +25,7 @@ use zed_actions::agent::{ ResolveConflictsWithAgent, ReviewBranchDiff, }; +use crate::thread_metadata_store::ThreadMetadataStore; use crate::{ AddContextServer, AgentDiffPane, ConversationView, CopyThreadToClipboard, CycleStartThreadIn, Follow, InlineAssistant, LoadThreadFromClipboard, NewThread, OpenActiveThreadAsMarkdown, @@ -753,28 +754,21 @@ impl AgentPanel { .as_ref() .and_then(|p| p.last_active_thread.as_ref()) { - if thread_info.agent_type.is_native() { - let session_id = acp::SessionId::new(thread_info.session_id.clone()); - let load_result = cx.update(|_window, cx| { - let thread_store = ThreadStore::global(cx); - thread_store.update(cx, |store, cx| store.load_thread(session_id, cx)) - }); - let thread_exists = if let Ok(task) = load_result { - task.await.ok().flatten().is_some() - } else { - false - }; - if thread_exists { - Some(thread_info) - } else { - log::warn!( - "last active thread {} not found in database, skipping restoration", - thread_info.session_id - ); - None - } - } else { + let session_id = acp::SessionId::new(thread_info.session_id.clone()); + let has_metadata = cx + .update(|_window, cx| { + let store = ThreadMetadataStore::global(cx); + store.read(cx).entry(&session_id).is_some() + }) + 
.unwrap_or(false); + if has_metadata { Some(thread_info) + } else { + log::warn!( + "last active thread {} has no metadata, skipping restoration", + thread_info.session_id + ); + None } } else { None @@ -1734,6 +1728,10 @@ impl AgentPanel { return; }; + if thread_view.read(cx).thread.read(cx).entries().is_empty() { + return; + } + self.background_threads .insert(thread_view.read(cx).id.clone(), conversation_view); self.cleanup_background_threads(cx); @@ -2078,6 +2076,10 @@ impl AgentPanel { window: &mut Window, cx: &mut Context, ) { + if let Some(store) = ThreadMetadataStore::try_global(cx) { + store.update(cx, |store, cx| store.unarchive(&session_id, cx)); + } + if let Some(conversation_view) = self.background_threads.remove(&session_id) { self.set_active_view( ActiveView::AgentThread { conversation_view }, @@ -2588,7 +2590,7 @@ impl AgentPanel { anyhow::Ok(()) }); - self._worktree_creation_task = Some(cx.foreground_executor().spawn(async move { + self._worktree_creation_task = Some(cx.background_spawn(async move { task.await.log_err(); })); } @@ -2745,6 +2747,10 @@ impl AgentPanel { new_window_handle.update(cx, |multi_workspace, window, cx| { multi_workspace.activate(new_workspace.clone(), window, cx); + + new_workspace.update(cx, |workspace, cx| { + workspace.run_create_worktree_tasks(window, cx); + }) })?; this.update_in(cx, |this, window, cx| { @@ -4297,6 +4303,8 @@ mod tests { ); }); + send_message(&panel_a, cx); + let agent_type_a = panel_a.read_with(cx, |panel, _cx| panel.selected_agent.clone()); // --- Set up workspace B: ClaudeCode, no active thread --- @@ -4356,6 +4364,72 @@ mod tests { }); } + #[gpui::test] + async fn test_non_native_thread_without_metadata_is_not_restored(cx: &mut TestAppContext) { + init_test(cx); + cx.update(|cx| { + cx.update_flags(true, vec!["agent-v2".to_string()]); + agent::ThreadStore::init_global(cx); + language_model::LanguageModelRegistry::test(cx); + }); + + let fs = FakeFs::new(cx.executor()); + let project = 
Project::test(fs, [], cx).await; + + let multi_workspace = + cx.add_window(|window, cx| MultiWorkspace::test_new(project.clone(), window, cx)); + + let workspace = multi_workspace + .read_with(cx, |multi_workspace, _cx| { + multi_workspace.workspace().clone() + }) + .unwrap(); + + workspace.update(cx, |workspace, _cx| { + workspace.set_random_database_id(); + }); + + let cx = &mut VisualTestContext::from_window(multi_workspace.into(), cx); + + let panel = workspace.update_in(cx, |workspace, window, cx| { + cx.new(|cx| AgentPanel::new(workspace, None, window, cx)) + }); + + panel.update_in(cx, |panel, window, cx| { + panel.open_external_thread_with_server( + Rc::new(StubAgentServer::default_response()), + window, + cx, + ); + }); + + cx.run_until_parked(); + + panel.read_with(cx, |panel, cx| { + assert!( + panel.active_agent_thread(cx).is_some(), + "should have an active thread after connection" + ); + }); + + // Serialize without ever sending a message, so no thread metadata exists. + panel.update(cx, |panel, cx| panel.serialize(cx)); + cx.run_until_parked(); + + let async_cx = cx.update(|window, cx| window.to_async(cx)); + let loaded = AgentPanel::load(workspace.downgrade(), async_cx) + .await + .expect("panel load should succeed"); + cx.run_until_parked(); + + loaded.read_with(cx, |panel, _cx| { + assert!( + panel.active_conversation_view().is_none(), + "thread without metadata should not be restored" + ); + }); + } + /// Extracts the text from a Text content block, panicking if it's not Text. 
fn expect_text_block(block: &acp::ContentBlock) -> &str { match block { @@ -4698,6 +4772,38 @@ mod tests { (panel, cx) } + #[gpui::test] + async fn test_empty_draft_thread_not_retained_when_navigating_away(cx: &mut TestAppContext) { + let (panel, mut cx) = setup_panel(cx).await; + + let connection_a = StubAgentConnection::new(); + open_thread_with_connection(&panel, connection_a, &mut cx); + let session_id_a = active_session_id(&panel, &cx); + + panel.read_with(&cx, |panel, cx| { + let thread = panel.active_agent_thread(cx).unwrap(); + assert!( + thread.read(cx).entries().is_empty(), + "newly opened draft thread should have no entries" + ); + assert!(panel.background_threads.is_empty()); + }); + + let connection_b = StubAgentConnection::new(); + open_thread_with_connection(&panel, connection_b, &mut cx); + + panel.read_with(&cx, |panel, _cx| { + assert!( + panel.background_threads.is_empty(), + "empty draft thread should not be retained in background_threads" + ); + assert!( + !panel.background_threads.contains_key(&session_id_a), + "empty draft thread should not be keyed in background_threads" + ); + }); + } + #[gpui::test] async fn test_running_thread_retained_when_navigating_away(cx: &mut TestAppContext) { let (panel, mut cx) = setup_panel(cx).await; @@ -4809,6 +4915,7 @@ mod tests { // Open thread B — thread A goes to background. 
let connection_b = StubAgentConnection::new(); open_thread_with_connection(&panel, connection_b, &mut cx); + send_message(&panel, &mut cx); let session_id_b = active_session_id(&panel, &cx); @@ -5068,7 +5175,7 @@ mod tests { multi_workspace .read_with(cx, |multi_workspace, _cx| { assert_eq!( - multi_workspace.workspaces().len(), + multi_workspace.workspaces().count(), 1, "LocalProject should not create a new workspace" ); @@ -5344,6 +5451,11 @@ mod tests { let multi_workspace = cx.add_window(|window, cx| MultiWorkspace::test_new(project.clone(), window, cx)); + multi_workspace + .update(cx, |multi_workspace, _, cx| { + multi_workspace.open_sidebar(cx); + }) + .unwrap(); let workspace = multi_workspace .read_with(cx, |multi_workspace, _cx| { @@ -5431,15 +5543,14 @@ mod tests { .read_with(cx, |multi_workspace, cx| { // There should be more than one workspace now (the original + the new worktree). assert!( - multi_workspace.workspaces().len() > 1, + multi_workspace.workspaces().count() > 1, "expected a new workspace to have been created, found {}", - multi_workspace.workspaces().len(), + multi_workspace.workspaces().count(), ); // Check the newest workspace's panel for the correct agent. let new_workspace = multi_workspace .workspaces() - .iter() .find(|ws| ws.entity_id() != workspace.entity_id()) .expect("should find the new workspace"); let new_panel = new_workspace diff --git a/crates/agent_ui/src/agent_ui.rs b/crates/agent_ui/src/agent_ui.rs index 98715056ccec43fb91cc4dc9307cf41d84719fc0..5cff5bfc38d4512d659d919c6e7c4ff02fcc0caf 100644 --- a/crates/agent_ui/src/agent_ui.rs +++ b/crates/agent_ui/src/agent_ui.rs @@ -173,6 +173,22 @@ actions!( ToggleThinkingEffortMenu, /// Toggles fast mode for models that support it. ToggleFastMode, + /// Scroll the output by one page up. + ScrollOutputPageUp, + /// Scroll the output by one page down. + ScrollOutputPageDown, + /// Scroll the output up by three lines. + ScrollOutputLineUp, + /// Scroll the output down by three lines. 
+ ScrollOutputLineDown, + /// Scroll the output to the top. + ScrollOutputToTop, + /// Scroll the output to the bottom. + ScrollOutputToBottom, + /// Scroll the output to the previous user message. + ScrollOutputToPreviousMessage, + /// Scroll the output to the next user message. + ScrollOutputToNextMessage, ] ); @@ -674,7 +690,9 @@ mod tests { use feature_flags::FeatureFlagAppExt; use gpui::{BorrowAppContext, TestAppContext, px}; use project::DisableAiSettings; - use settings::{DockPosition, NotifyWhenAgentWaiting, Settings, SettingsStore}; + use settings::{ + DockPosition, NotifyWhenAgentWaiting, PlaySoundWhenAgentDone, Settings, SettingsStore, + }; #[gpui::test] fn test_agent_command_palette_visibility(cx: &mut TestAppContext) { @@ -705,7 +723,7 @@ mod tests { default_profile: AgentProfileId::default(), profiles: Default::default(), notify_when_agent_waiting: NotifyWhenAgentWaiting::default(), - play_sound_when_agent_done: false, + play_sound_when_agent_done: PlaySoundWhenAgentDone::Never, single_file_review: false, model_parameters: vec![], enable_feedback: false, @@ -716,6 +734,7 @@ mod tests { message_editor_min_lines: 1, tool_permissions: Default::default(), show_turn_stats: false, + show_merge_conflict_indicator: true, new_thread_location: Default::default(), sidebar_side: Default::default(), thinking_display: Default::default(), diff --git a/crates/agent_ui/src/buffer_codegen.rs b/crates/agent_ui/src/buffer_codegen.rs index 420f8665e349c4e79222cdfa034de44971fab538..d5288c564d7211a986fa6347e2b74782c58d9c75 100644 --- a/crates/agent_ui/src/buffer_codegen.rs +++ b/crates/agent_ui/src/buffer_codegen.rs @@ -303,7 +303,7 @@ impl CodegenAlternative { let snapshot = buffer.read(cx).snapshot(cx); let (old_buffer, _, _) = snapshot - .range_to_buffer_ranges(range.start..=range.end) + .range_to_buffer_ranges(range.start..range.end) .pop() .unwrap(); let old_buffer = cx.new(|cx| { @@ -684,7 +684,7 @@ impl CodegenAlternative { let language_name = { let multibuffer = 
self.buffer.read(cx); let snapshot = multibuffer.snapshot(cx); - let ranges = snapshot.range_to_buffer_ranges(self.range.start..=self.range.end); + let ranges = snapshot.range_to_buffer_ranges(self.range.start..self.range.end); ranges .first() .and_then(|(buffer, _, _)| buffer.language()) diff --git a/crates/agent_ui/src/completion_provider.rs b/crates/agent_ui/src/completion_provider.rs index 6259269834b0add5b87fd9d397e17671d30adb9f..47fd7b0295adbcd2ecea768c3bd9e321a5f551b9 100644 --- a/crates/agent_ui/src/completion_provider.rs +++ b/crates/agent_ui/src/completion_provider.rs @@ -9,9 +9,7 @@ use crate::ThreadHistory; use acp_thread::MentionUri; use agent_client_protocol as acp; use anyhow::Result; -use editor::{ - CompletionProvider, Editor, ExcerptId, code_context_menus::COMPLETION_MENU_MAX_WIDTH, -}; +use editor::{CompletionProvider, Editor, code_context_menus::COMPLETION_MENU_MAX_WIDTH}; use futures::FutureExt as _; use fuzzy::{PathMatch, StringMatch, StringMatchCandidate}; use gpui::{App, BackgroundExecutor, Entity, SharedString, Task, WeakEntity}; @@ -621,7 +619,7 @@ impl PromptCompletionProvider { for (terminal_text, terminal_range) in terminal_ranges { let snapshot = editor.read(cx).buffer().read(cx).snapshot(cx); let Some(start) = - snapshot.as_singleton_anchor(source_range.start) + snapshot.anchor_in_excerpt(source_range.start) else { return; }; @@ -1235,7 +1233,6 @@ impl PromptCompletionProvider { impl CompletionProvider for PromptCompletionProvider { fn completions( &self, - _excerpt_id: ExcerptId, buffer: &Entity, buffer_position: Anchor, _trigger: CompletionContext, @@ -2147,7 +2144,7 @@ fn build_code_label_for_path( .theme() .syntax() .highlight_id("variable") - .map(HighlightId); + .map(HighlightId::new); let mut label = CodeLabelBuilder::default(); label.push_str(file, None); diff --git a/crates/agent_ui/src/conversation_view.rs b/crates/agent_ui/src/conversation_view.rs index 
2231f421bc2af0d8038c002a72c226f551f243cc..7c9acfdf27d5b750afe4b8817af7f657f5fcdecc 100644 --- a/crates/agent_ui/src/conversation_view.rs +++ b/crates/agent_ui/src/conversation_view.rs @@ -85,8 +85,11 @@ use crate::{ AuthorizeToolCall, ClearMessageQueue, CycleFavoriteModels, CycleModeSelector, CycleThinkingEffort, EditFirstQueuedMessage, ExpandMessageEditor, Follow, KeepAll, NewThread, OpenAddContextMenu, OpenAgentDiff, OpenHistory, RejectAll, RejectOnce, - RemoveFirstQueuedMessage, SendImmediately, SendNextQueuedMessage, ToggleFastMode, - ToggleProfileSelector, ToggleThinkingEffortMenu, ToggleThinkingMode, UndoLastReject, + RemoveFirstQueuedMessage, ScrollOutputLineDown, ScrollOutputLineUp, ScrollOutputPageDown, + ScrollOutputPageUp, ScrollOutputToBottom, ScrollOutputToNextMessage, + ScrollOutputToPreviousMessage, ScrollOutputToTop, SendImmediately, SendNextQueuedMessage, + ToggleFastMode, ToggleProfileSelector, ToggleThinkingEffortMenu, ToggleThinkingMode, + UndoLastReject, }; const STOPWATCH_THRESHOLD: Duration = Duration::from_secs(30); @@ -809,7 +812,7 @@ impl ConversationView { let agent_id = self.agent.agent_id(); let session_capabilities = Arc::new(RwLock::new(SessionCapabilities::new( thread.read(cx).prompt_capabilities(), - vec![], + thread.read(cx).available_commands().to_vec(), ))); let action_log = thread.read(cx).action_log().clone(); @@ -828,6 +831,8 @@ impl ConversationView { let count = thread.read(cx).entries().len(); let list_state = ListState::new(0, gpui::ListAlignment::Top, px(2048.0)); + list_state.set_follow_mode(gpui::FollowMode::Tail); + entry_view_state.update(cx, |view_state, cx| { for ix in 0..count { view_state.sync_entry(ix, &thread, window, cx); @@ -841,7 +846,7 @@ impl ConversationView { if let Some(scroll_position) = thread.read(cx).ui_scroll_position() { list_state.scroll_to(scroll_position); } else { - list_state.set_follow_tail(true); + list_state.scroll_to_end(); } AgentDiff::set_active_thread(&self.workspace, thread.clone(), 
window, cx); @@ -1257,9 +1262,11 @@ impl ConversationView { AcpThreadEvent::EntryUpdated(index) => { if let Some(active) = self.thread_view(&thread_id) { let entry_view_state = active.read(cx).entry_view_state.clone(); + let list_state = active.read(cx).list_state.clone(); entry_view_state.update(cx, |view_state, cx| { - view_state.sync_entry(*index, thread, window, cx) + view_state.sync_entry(*index, thread, window, cx); }); + list_state.remeasure_items(*index..*index + 1); active.update(cx, |active, cx| { active.auto_expand_streaming_thought(cx); }); @@ -1295,10 +1302,16 @@ impl ConversationView { } AcpThreadEvent::Stopped(stop_reason) => { if let Some(active) = self.thread_view(&thread_id) { + let is_generating = + matches!(thread.read(cx).status(), ThreadStatus::Generating); active.update(cx, |active, cx| { - active.thread_retry_status.take(); - active.clear_auto_expand_tracking(); - active.list_state.set_follow_tail(false); + if !is_generating { + active.thread_retry_status.take(); + active.clear_auto_expand_tracking(); + if active.list_state.is_following_tail() { + active.list_state.scroll_to_end(); + } + } active.sync_generating_indicator(cx); }); } @@ -1367,9 +1380,15 @@ impl ConversationView { } AcpThreadEvent::Error => { if let Some(active) = self.thread_view(&thread_id) { + let is_generating = + matches!(thread.read(cx).status(), ThreadStatus::Generating); active.update(cx, |active, cx| { - active.thread_retry_status.take(); - active.list_state.set_follow_tail(false); + if !is_generating { + active.thread_retry_status.take(); + if active.list_state.is_following_tail() { + active.list_state.scroll_to_end(); + } + } active.sync_generating_indicator(cx); }); } @@ -1429,40 +1448,24 @@ impl ConversationView { self.emit_token_limit_telemetry_if_needed(thread, cx); } AcpThreadEvent::AvailableCommandsUpdated(available_commands) => { - let mut available_commands = available_commands.clone(); - - if thread - .read(cx) - .connection() - .auth_methods() - .iter() - 
.any(|method| method.id().0.as_ref() == "claude-login") - { - available_commands.push(acp::AvailableCommand::new("login", "Authenticate")); - available_commands.push(acp::AvailableCommand::new("logout", "Authenticate")); - } + if let Some(thread_view) = self.thread_view(&thread_id) { + let has_commands = !available_commands.is_empty(); - let has_commands = !available_commands.is_empty(); - if let Some(active) = self.active_thread() { - active.update(cx, |active, _cx| { - active - .session_capabilities - .write() - .set_available_commands(available_commands); - }); - } - - let agent_display_name = self - .agent_server_store - .read(cx) - .agent_display_name(&self.agent.agent_id()) - .unwrap_or_else(|| self.agent.agent_id().0.to_string().into()); + let agent_display_name = self + .agent_server_store + .read(cx) + .agent_display_name(&self.agent.agent_id()) + .unwrap_or_else(|| self.agent.agent_id().0.to_string().into()); - if let Some(active) = self.active_thread() { let new_placeholder = placeholder_text(agent_display_name.as_ref(), has_commands); - active.update(cx, |active, cx| { - active.message_editor.update(cx, |editor, cx| { + + thread_view.update(cx, |thread_view, cx| { + thread_view + .session_capabilities + .write() + .set_available_commands(available_commands.clone()); + thread_view.message_editor.update(cx, |editor, cx| { editor.set_placeholder_text(&new_placeholder, window, cx); }); }); @@ -2329,9 +2332,9 @@ impl ConversationView { } } + #[cfg(feature = "audio")] fn play_notification_sound(&self, window: &Window, cx: &mut App) { - let settings = AgentSettings::get_global(cx); - let _visible = window.is_window_active() + let visible = window.is_window_active() && if let Some(mw) = window.root::().flatten() { self.agent_panel_visible(&mw, cx) } else { @@ -2339,8 +2342,8 @@ impl ConversationView { .upgrade() .is_some_and(|workspace| AgentPanel::is_visible(&workspace, cx)) }; - #[cfg(feature = "audio")] - if settings.play_sound_when_agent_done && !_visible { 
+ let settings = AgentSettings::get_global(cx); + if settings.play_sound_when_agent_done.should_play(visible) { Audio::play_sound(Sound::AgentDone, cx); } } @@ -2970,6 +2973,166 @@ pub(crate) mod tests { }); } + #[derive(Clone)] + struct RestoredAvailableCommandsConnection; + + impl AgentConnection for RestoredAvailableCommandsConnection { + fn agent_id(&self) -> AgentId { + AgentId::new("restored-available-commands") + } + + fn telemetry_id(&self) -> SharedString { + "restored-available-commands".into() + } + + fn new_session( + self: Rc, + project: Entity, + _work_dirs: PathList, + cx: &mut App, + ) -> Task>> { + let thread = build_test_thread( + self, + project, + "RestoredAvailableCommandsConnection", + SessionId::new("new-session"), + cx, + ); + Task::ready(Ok(thread)) + } + + fn supports_load_session(&self) -> bool { + true + } + + fn load_session( + self: Rc, + session_id: acp::SessionId, + project: Entity, + _work_dirs: PathList, + _title: Option, + cx: &mut App, + ) -> Task>> { + let thread = build_test_thread( + self, + project, + "RestoredAvailableCommandsConnection", + session_id, + cx, + ); + + thread + .update(cx, |thread, cx| { + thread.handle_session_update( + acp::SessionUpdate::AvailableCommandsUpdate( + acp::AvailableCommandsUpdate::new(vec![acp::AvailableCommand::new( + "help", "Get help", + )]), + ), + cx, + ) + }) + .expect("available commands update should succeed"); + + Task::ready(Ok(thread)) + } + + fn auth_methods(&self) -> &[acp::AuthMethod] { + &[] + } + + fn authenticate( + &self, + _method_id: acp::AuthMethodId, + _cx: &mut App, + ) -> Task> { + Task::ready(Ok(())) + } + + fn prompt( + &self, + _id: Option, + _params: acp::PromptRequest, + _cx: &mut App, + ) -> Task> { + Task::ready(Ok(acp::PromptResponse::new(acp::StopReason::EndTurn))) + } + + fn cancel(&self, _session_id: &acp::SessionId, _cx: &mut App) {} + + fn into_any(self: Rc) -> Rc { + self + } + } + + #[gpui::test] + async fn test_restored_threads_keep_available_commands(cx: 
&mut TestAppContext) { + init_test(cx); + + let fs = FakeFs::new(cx.executor()); + let project = Project::test(fs, [], cx).await; + let (multi_workspace, cx) = + cx.add_window_view(|window, cx| MultiWorkspace::test_new(project.clone(), window, cx)); + let workspace = multi_workspace.read_with(cx, |mw, _| mw.workspace().clone()); + + let thread_store = cx.update(|_window, cx| cx.new(|cx| ThreadStore::new(cx))); + let connection_store = + cx.update(|_window, cx| cx.new(|cx| AgentConnectionStore::new(project.clone(), cx))); + + let conversation_view = cx.update(|window, cx| { + cx.new(|cx| { + ConversationView::new( + Rc::new(StubAgentServer::new(RestoredAvailableCommandsConnection)), + connection_store, + Agent::Custom { id: "Test".into() }, + Some(SessionId::new("restored-session")), + None, + None, + None, + workspace.downgrade(), + project, + Some(thread_store), + None, + window, + cx, + ) + }) + }); + + cx.run_until_parked(); + + let message_editor = message_editor(&conversation_view, cx); + let editor = + message_editor.update(cx, |message_editor, _cx| message_editor.editor().clone()); + let placeholder = editor.update(cx, |editor, cx| editor.placeholder_text(cx)); + + active_thread(&conversation_view, cx).read_with(cx, |view, _cx| { + let available_commands = view + .session_capabilities + .read() + .available_commands() + .to_vec(); + assert_eq!(available_commands.len(), 1); + assert_eq!(available_commands[0].name.as_str(), "help"); + assert_eq!(available_commands[0].description.as_str(), "Get help"); + }); + + assert_eq!( + placeholder, + Some("Message Test — @ to include context, / for commands".to_string()) + ); + + message_editor.update_in(cx, |editor, window, cx| { + editor.set_text("/help", window, cx); + }); + + let contents_result = message_editor + .update(cx, |editor, cx| editor.contents(false, cx)) + .await; + + assert!(contents_result.is_ok()); + } + #[gpui::test] async fn test_resume_thread_uses_session_cwd_when_inside_project(cx: &mut 
TestAppContext) { init_test(cx); @@ -3356,7 +3519,6 @@ pub(crate) mod tests { // Verify workspace1 is no longer the active workspace multi_workspace_handle .read_with(cx, |mw, _cx| { - assert_eq!(mw.active_workspace_index(), 1); assert_ne!(mw.workspace(), &workspace1); }) .unwrap(); @@ -4851,6 +5013,63 @@ pub(crate) mod tests { }); } + #[gpui::test] + async fn test_stale_stop_does_not_disable_follow_tail_during_regenerate( + cx: &mut TestAppContext, + ) { + init_test(cx); + + let connection = StubAgentConnection::new(); + + let (conversation_view, cx) = + setup_conversation_view(StubAgentServer::new(connection.clone()), cx).await; + add_to_workspace(conversation_view.clone(), cx); + + let message_editor = message_editor(&conversation_view, cx); + message_editor.update_in(cx, |editor, window, cx| { + editor.set_text("Original message to edit", window, cx); + }); + active_thread(&conversation_view, cx) + .update_in(cx, |view, window, cx| view.send(window, cx)); + + cx.run_until_parked(); + + let user_message_editor = conversation_view.read_with(cx, |view, cx| { + view.active_thread() + .map(|active| &active.read(cx).entry_view_state) + .as_ref() + .unwrap() + .read(cx) + .entry(0) + .unwrap() + .message_editor() + .unwrap() + .clone() + }); + + cx.focus(&user_message_editor); + user_message_editor.update_in(cx, |editor, window, cx| { + editor.set_text("Edited message content", window, cx); + }); + + user_message_editor.update_in(cx, |_editor, window, cx| { + window.dispatch_action(Box::new(Chat), cx); + }); + + cx.run_until_parked(); + + conversation_view.read_with(cx, |view, cx| { + let active = view.active_thread().unwrap(); + let active = active.read(cx); + + assert_eq!(active.thread.read(cx).status(), ThreadStatus::Generating); + assert!( + active.list_state.is_following_tail(), + "stale stop events from the cancelled turn must not disable follow-tail for the new turn" + ); + }); + } + struct GeneratingThreadSetup { conversation_view: Entity, thread: Entity, diff 
--git a/crates/agent_ui/src/conversation_view/thread_view.rs b/crates/agent_ui/src/conversation_view/thread_view.rs index b25769eadbe31c35a6261cc9433349a2943617be..685621eb3c93632f1e7410bbbad22b623d5e18c7 100644 --- a/crates/agent_ui/src/conversation_view/thread_view.rs +++ b/crates/agent_ui/src/conversation_view/thread_view.rs @@ -344,7 +344,8 @@ impl ThreadView { ) -> Self { let id = thread.read(cx).session_id().clone(); - let placeholder = placeholder_text(agent_display_name.as_ref(), false); + let has_commands = !session_capabilities.read().available_commands().is_empty(); + let placeholder = placeholder_text(agent_display_name.as_ref(), has_commands); let history_subscription = history.as_ref().map(|h| { cx.observe(h, |this, history, cx| { @@ -541,31 +542,15 @@ impl ThreadView { let thread_view = cx.entity().downgrade(); this.list_state - .set_scroll_handler(move |event, _window, cx| { + .set_scroll_handler(move |_event, _window, cx| { let list_state = list_state_for_scroll.clone(); let thread_view = thread_view.clone(); - let is_following_tail = event.is_following_tail; // N.B. We must defer because the scroll handler is called while the // ListState's RefCell is mutably borrowed. Reading logical_scroll_top() // directly would panic from a double borrow. 
cx.defer(move |cx| { let scroll_top = list_state.logical_scroll_top(); let _ = thread_view.update(cx, |this, cx| { - if !is_following_tail { - let is_at_bottom = { - let current_offset = - list_state.scroll_px_offset_for_scrollbar().y.abs(); - let max_offset = list_state.max_offset_for_scrollbar().y; - current_offset >= max_offset - px(1.0) - }; - - let is_generating = - matches!(this.thread.read(cx).status(), ThreadStatus::Generating); - - if is_at_bottom && is_generating { - list_state.set_follow_tail(true); - } - } if let Some(thread) = this.as_native_thread(cx) { thread.update(cx, |thread, _cx| { thread.set_ui_scroll_position(Some(scroll_top)); @@ -832,13 +817,10 @@ impl ThreadView { } } })); - if self.parent_id.is_none() { - self.suppress_merge_conflict_notification(cx); - } generation } - pub fn stop_turn(&mut self, generation: usize, cx: &mut Context) { + pub fn stop_turn(&mut self, generation: usize, _cx: &mut Context) { if self.turn_fields.turn_generation != generation { return; } @@ -849,25 +831,6 @@ impl ThreadView { .map(|started| started.elapsed()); self.turn_fields.last_turn_tokens = self.turn_fields.turn_tokens.take(); self.turn_fields._turn_timer_task = None; - if self.parent_id.is_none() { - self.unsuppress_merge_conflict_notification(cx); - } - } - - fn suppress_merge_conflict_notification(&self, cx: &mut Context) { - self.workspace - .update(cx, |workspace, cx| { - workspace.suppress_notification(&workspace::merge_conflict_notification_id(), cx); - }) - .ok(); - } - - fn unsuppress_merge_conflict_notification(&self, cx: &mut Context) { - self.workspace - .update(cx, |workspace, _cx| { - workspace.unsuppress(workspace::merge_conflict_notification_id()); - }) - .ok(); } pub fn update_turn_tokens(&mut self, cx: &App) { @@ -1077,7 +1040,7 @@ impl ThreadView { })?; let _ = this.update(cx, |this, cx| { - this.list_state.set_follow_tail(true); + this.list_state.scroll_to_end(); cx.notify(); }); @@ -4978,6 +4941,105 @@ impl ThreadView { cx.notify(); } + 
fn scroll_output_page_up( + &mut self, + _: &ScrollOutputPageUp, + _window: &mut Window, + cx: &mut Context, + ) { + let page_height = self.list_state.viewport_bounds().size.height; + self.list_state.scroll_by(-page_height * 0.9); + cx.notify(); + } + + fn scroll_output_page_down( + &mut self, + _: &ScrollOutputPageDown, + _window: &mut Window, + cx: &mut Context, + ) { + let page_height = self.list_state.viewport_bounds().size.height; + self.list_state.scroll_by(page_height * 0.9); + cx.notify(); + } + + fn scroll_output_line_up( + &mut self, + _: &ScrollOutputLineUp, + window: &mut Window, + cx: &mut Context, + ) { + self.list_state.scroll_by(-window.line_height() * 3.); + cx.notify(); + } + + fn scroll_output_line_down( + &mut self, + _: &ScrollOutputLineDown, + window: &mut Window, + cx: &mut Context, + ) { + self.list_state.scroll_by(window.line_height() * 3.); + cx.notify(); + } + + fn scroll_output_to_top( + &mut self, + _: &ScrollOutputToTop, + _window: &mut Window, + cx: &mut Context, + ) { + self.scroll_to_top(cx); + } + + fn scroll_output_to_bottom( + &mut self, + _: &ScrollOutputToBottom, + _window: &mut Window, + cx: &mut Context, + ) { + self.scroll_to_end(cx); + } + + fn scroll_output_to_previous_message( + &mut self, + _: &ScrollOutputToPreviousMessage, + _window: &mut Window, + cx: &mut Context, + ) { + let entries = self.thread.read(cx).entries(); + let current_ix = self.list_state.logical_scroll_top().item_ix; + if let Some(target_ix) = (0..current_ix) + .rev() + .find(|&i| matches!(entries.get(i), Some(AgentThreadEntry::UserMessage(_)))) + { + self.list_state.scroll_to(ListOffset { + item_ix: target_ix, + offset_in_item: px(0.), + }); + cx.notify(); + } + } + + fn scroll_output_to_next_message( + &mut self, + _: &ScrollOutputToNextMessage, + _window: &mut Window, + cx: &mut Context, + ) { + let entries = self.thread.read(cx).entries(); + let current_ix = self.list_state.logical_scroll_top().item_ix; + if let Some(target_ix) = (current_ix + 
1..entries.len()) + .find(|&i| matches!(entries.get(i), Some(AgentThreadEntry::UserMessage(_)))) + { + self.list_state.scroll_to(ListOffset { + item_ix: target_ix, + offset_in_item: px(0.), + }); + cx.notify(); + } + } + pub fn open_thread_as_markdown( &self, workspace: Entity, @@ -5207,9 +5269,12 @@ impl ThreadView { match thinking_display { ThinkingBlockDisplay::Auto => { - if self.expanded_thinking_blocks.contains(&key) { + let is_open = self.expanded_thinking_blocks.contains(&key) + || self.user_toggled_thinking_blocks.contains(&key); + + if is_open { self.expanded_thinking_blocks.remove(&key); - self.user_toggled_thinking_blocks.insert(key); + self.user_toggled_thinking_blocks.remove(&key); } else { self.expanded_thinking_blocks.insert(key); self.user_toggled_thinking_blocks.insert(key); @@ -7126,17 +7191,10 @@ impl ThreadView { }; active_editor.update_in(cx, |editor, window, cx| { - let singleton = editor - .buffer() - .read(cx) - .read(cx) - .as_singleton() - .map(|(a, b, _)| (a, b)); - if let Some((excerpt_id, buffer_id)) = singleton - && let Some(agent_buffer) = agent_location.buffer.upgrade() - && agent_buffer.read(cx).remote_id() == buffer_id + let snapshot = editor.buffer().read(cx).snapshot(cx); + if snapshot.as_singleton().is_some() + && let Some(anchor) = snapshot.anchor_in_excerpt(agent_location.position) { - let anchor = editor::Anchor::in_buffer(excerpt_id, agent_location.position); editor.change_selections(Default::default(), window, cx, |selections| { selections.select_anchor_ranges([anchor..anchor]); }) @@ -7332,9 +7390,8 @@ impl ThreadView { .gap_2() .map(|this| { if card_layout { - this.when(context_ix > 0, |this| { - this.pt_2() - .border_t_1() + this.p_2().when(context_ix > 0, |this| { + this.border_t_1() .border_color(self.tool_card_border_color(cx)) }) } else { @@ -8545,6 +8602,14 @@ impl Render for ThreadView { .on_action(cx.listener(Self::handle_toggle_command_pattern)) .on_action(cx.listener(Self::open_permission_dropdown)) 
.on_action(cx.listener(Self::open_add_context_menu)) + .on_action(cx.listener(Self::scroll_output_page_up)) + .on_action(cx.listener(Self::scroll_output_page_down)) + .on_action(cx.listener(Self::scroll_output_line_up)) + .on_action(cx.listener(Self::scroll_output_line_down)) + .on_action(cx.listener(Self::scroll_output_to_top)) + .on_action(cx.listener(Self::scroll_output_to_bottom)) + .on_action(cx.listener(Self::scroll_output_to_previous_message)) + .on_action(cx.listener(Self::scroll_output_to_next_message)) .on_action(cx.listener(|this, _: &ToggleFastMode, _window, cx| { this.toggle_fast_mode(cx); })) @@ -8732,7 +8797,7 @@ pub(crate) fn open_link( .open_path(path, None, true, window, cx) .detach_and_log_err(cx); } - MentionUri::PastedImage => {} + MentionUri::PastedImage { .. } => {} MentionUri::Directory { abs_path } => { let project = workspace.project(); let Some(entry_id) = project.update(cx, |project, cx| { diff --git a/crates/agent_ui/src/inline_assistant.rs b/crates/agent_ui/src/inline_assistant.rs index 01543b657fc2d00fbf8c68cd96c6329d2f4952d6..39d70790e0d4a18554b2a1c11510e529d921cd1b 100644 --- a/crates/agent_ui/src/inline_assistant.rs +++ b/crates/agent_ui/src/inline_assistant.rs @@ -27,8 +27,8 @@ use editor::RowExt; use editor::SelectionEffects; use editor::scroll::ScrollOffset; use editor::{ - Anchor, AnchorRangeExt, CodeActionProvider, Editor, EditorEvent, ExcerptId, HighlightKey, - MultiBuffer, MultiBufferSnapshot, ToOffset as _, ToPoint, + Anchor, AnchorRangeExt, CodeActionProvider, Editor, EditorEvent, HighlightKey, MultiBuffer, + MultiBufferSnapshot, ToOffset as _, ToPoint, actions::SelectAll, display_map::{ BlockContext, BlockPlacement, BlockProperties, BlockStyle, CustomBlockId, EditorMargins, @@ -443,15 +443,17 @@ impl InlineAssistant { let newest_selection = newest_selection.unwrap(); let mut codegen_ranges = Vec::new(); - for (buffer, buffer_range, excerpt_id) in - snapshot.ranges_to_buffer_ranges(selections.iter().map(|selection| { - 
snapshot.anchor_before(selection.start)..snapshot.anchor_after(selection.end) - })) + for (buffer, buffer_range, _) in selections + .iter() + .flat_map(|selection| snapshot.range_to_buffer_ranges(selection.start..selection.end)) { - let anchor_range = Anchor::range_in_buffer( - excerpt_id, - buffer.anchor_before(buffer_range.start)..buffer.anchor_after(buffer_range.end), - ); + let (Some(start), Some(end)) = ( + snapshot.anchor_in_buffer(buffer.anchor_before(buffer_range.start)), + snapshot.anchor_in_buffer(buffer.anchor_after(buffer_range.end)), + ) else { + continue; + }; + let anchor_range = start..end; codegen_ranges.push(anchor_range); @@ -982,8 +984,7 @@ impl InlineAssistant { match event { EditorEvent::Edited { transaction_id } => { let buffer = editor.read(cx).buffer().read(cx); - let edited_ranges = - buffer.edited_ranges_for_transaction::(*transaction_id, cx); + let edited_ranges = buffer.edited_ranges_for_transaction(*transaction_id, cx); let snapshot = buffer.snapshot(cx); for assist_id in editor_assists.assist_ids.clone() { @@ -1089,7 +1090,7 @@ impl InlineAssistant { let multibuffer = editor.read(cx).buffer().read(cx); let snapshot = multibuffer.snapshot(cx); let ranges = - snapshot.range_to_buffer_ranges(assist.range.start..=assist.range.end); + snapshot.range_to_buffer_ranges(assist.range.start..assist.range.end); ranges .first() .and_then(|(buffer, _, _)| buffer.language()) @@ -1496,10 +1497,10 @@ impl InlineAssistant { let mut new_blocks = Vec::new(); for (new_row, old_row_range) in deleted_row_ranges { - let (_, start, _) = old_snapshot + let (_, start) = old_snapshot .point_to_buffer_point(Point::new(*old_row_range.start(), 0)) .unwrap(); - let (_, end, _) = old_snapshot + let (_, end) = old_snapshot .point_to_buffer_point(Point::new( *old_row_range.end(), old_snapshot.line_len(MultiBufferRow(*old_row_range.end())), @@ -1530,7 +1531,7 @@ impl InlineAssistant { editor.set_read_only(true); editor.set_show_edit_predictions(Some(false), window, cx); 
editor.highlight_rows::( - Anchor::min()..Anchor::max(), + Anchor::Min..Anchor::Max, cx.theme().status().deleted_background, Default::default(), cx, @@ -1938,9 +1939,8 @@ impl CodeActionProvider for AssistantCodeActionProvider { fn apply_code_action( &self, - buffer: Entity, + _buffer: Entity, action: CodeAction, - excerpt_id: ExcerptId, _push_to_history: bool, window: &mut Window, cx: &mut App, @@ -1970,31 +1970,8 @@ impl CodeActionProvider for AssistantCodeActionProvider { let range = editor .update(cx, |editor, cx| { editor.buffer().update(cx, |multibuffer, cx| { - let buffer = buffer.read(cx); - let multibuffer_snapshot = multibuffer.read(cx); - - let old_context_range = - multibuffer_snapshot.context_range_for_excerpt(excerpt_id)?; - let mut new_context_range = old_context_range.clone(); - if action - .range - .start - .cmp(&old_context_range.start, buffer) - .is_lt() - { - new_context_range.start = action.range.start; - } - if action.range.end.cmp(&old_context_range.end, buffer).is_gt() { - new_context_range.end = action.range.end; - } - drop(multibuffer_snapshot); - - if new_context_range != old_context_range { - multibuffer.resize_excerpt(excerpt_id, new_context_range, cx); - } - let multibuffer_snapshot = multibuffer.read(cx); - multibuffer_snapshot.anchor_range_in_excerpt(excerpt_id, action.range) + multibuffer_snapshot.buffer_anchor_range_to_anchor_range(action.range) }) }) .context("invalid range")?; @@ -2048,7 +2025,7 @@ fn merge_ranges(ranges: &mut Vec>, buffer: &MultiBufferSnapshot) { pub mod evals { use crate::InlineAssistant; use agent::ThreadStore; - use client::{Client, UserStore}; + use client::{Client, RefreshLlmTokenListener, UserStore}; use editor::{Editor, MultiBuffer, MultiBufferOffset}; use eval_utils::{EvalOutput, NoProcessor}; use fs::FakeFs; @@ -2114,7 +2091,8 @@ pub mod evals { client::init(&client, cx); workspace::init(app_state.clone(), cx); let user_store = cx.new(|cx| UserStore::new(client.clone(), cx)); - 
language_model::init(user_store.clone(), client.clone(), cx); + language_model::init(cx); + RefreshLlmTokenListener::register(client.clone(), user_store.clone(), cx); language_models::init(user_store, client.clone(), cx); cx.set_global(inline_assistant); diff --git a/crates/agent_ui/src/mention_set.rs b/crates/agent_ui/src/mention_set.rs index 2559edc566d4467eaaab180e0a16f4af5fae7ab9..1b2ec0ad2fd460b4eec5a8b757bdd3058d4a3704 100644 --- a/crates/agent_ui/src/mention_set.rs +++ b/crates/agent_ui/src/mention_set.rs @@ -6,7 +6,7 @@ use agent_servers::{AgentServer, AgentServerDelegate}; use anyhow::{Context as _, Result, anyhow}; use collections::{HashMap, HashSet}; use editor::{ - Anchor, Editor, EditorSnapshot, ExcerptId, FoldPlaceholder, ToOffset, + Anchor, Editor, EditorSnapshot, FoldPlaceholder, ToOffset, display_map::{Crease, CreaseId, CreaseMetadata, FoldId}, scroll::Autoscroll, }; @@ -154,7 +154,7 @@ impl MentionSet { MentionUri::Selection { abs_path: None, .. } => Task::ready(Err(anyhow!( "Untitled buffer selection mentions are not supported for paste" ))), - MentionUri::PastedImage + MentionUri::PastedImage { .. } | MentionUri::TerminalSelection { .. } | MentionUri::MergeConflict { .. 
} => { Task::ready(Err(anyhow!("Unsupported mention URI type for paste"))) @@ -204,10 +204,9 @@ impl MentionSet { }; let snapshot = editor.update(cx, |editor, cx| editor.snapshot(window, cx)); - let Some(start_anchor) = snapshot.buffer_snapshot().as_singleton_anchor(start) else { + let Some(start_anchor) = snapshot.buffer_snapshot().anchor_in_excerpt(start) else { return Task::ready(()); }; - let excerpt_id = start_anchor.excerpt_id; let end_anchor = snapshot.buffer_snapshot().anchor_before( start_anchor.to_offset(&snapshot.buffer_snapshot()) + content_len + 1usize, ); @@ -234,7 +233,6 @@ impl MentionSet { }) .shared(); insert_crease_for_mention( - excerpt_id, start, content_len, mention_uri.name().into(), @@ -249,7 +247,6 @@ impl MentionSet { ) } else { insert_crease_for_mention( - excerpt_id, start, content_len, crease_text, @@ -286,7 +283,7 @@ impl MentionSet { include_errors, include_warnings, } => self.confirm_mention_for_diagnostics(include_errors, include_warnings, cx), - MentionUri::PastedImage => { + MentionUri::PastedImage { .. 
} => { debug_panic!("pasted image URI should not be included in completions"); Task::ready(Err(anyhow!( "pasted imaged URI should not be included in completions" @@ -468,7 +465,7 @@ impl MentionSet { }; let snapshot = editor.read(cx).buffer().read(cx).snapshot(cx); - let Some(start) = snapshot.as_singleton_anchor(source_range.start) else { + let Some(start) = snapshot.anchor_in_excerpt(source_range.start) else { return; }; @@ -742,22 +739,22 @@ pub(crate) async fn insert_images_as_context( return; } - let replacement_text = MentionUri::PastedImage.as_link().to_string(); - for (image, name) in images { - let Some((excerpt_id, text_anchor, multibuffer_anchor)) = editor + let mention_uri = MentionUri::PastedImage { + name: name.to_string(), + }; + let replacement_text = mention_uri.as_link().to_string(); + let Some((text_anchor, multibuffer_anchor)) = editor .update_in(cx, |editor, window, cx| { let snapshot = editor.snapshot(window, cx); - let (excerpt_id, _, buffer_snapshot) = - snapshot.buffer_snapshot().as_singleton().unwrap(); - - let cursor_anchor = editor.selections.newest_anchor().start.text_anchor; - let text_anchor = cursor_anchor.bias_left(&buffer_snapshot); - let multibuffer_anchor = snapshot + let (cursor_anchor, buffer_snapshot) = snapshot .buffer_snapshot() - .anchor_in_excerpt(excerpt_id, text_anchor); + .anchor_to_buffer_anchor(editor.selections.newest_anchor().start) + .unwrap(); + let text_anchor = cursor_anchor.bias_left(buffer_snapshot); + let multibuffer_anchor = snapshot.buffer_snapshot().anchor_in_excerpt(text_anchor); editor.insert(&format!("{replacement_text} "), window, cx); - (excerpt_id, text_anchor, multibuffer_anchor) + (text_anchor, multibuffer_anchor) }) .ok() else { @@ -775,7 +772,6 @@ pub(crate) async fn insert_images_as_context( let image = Arc::new(image); let Ok(Some((crease_id, tx))) = cx.update(|window, cx| { insert_crease_for_mention( - excerpt_id, text_anchor, content_len, name.clone(), @@ -810,7 +806,13 @@ pub(crate) async fn 
insert_images_as_context( .shared(); mention_set.update(cx, |mention_set, _cx| { - mention_set.insert_mention(crease_id, MentionUri::PastedImage, task.clone()) + mention_set.insert_mention( + crease_id, + MentionUri::PastedImage { + name: name.to_string(), + }, + task.clone(), + ) }); if task @@ -879,7 +881,7 @@ pub(crate) fn paste_images_as_context( Some(window.spawn(cx, async move |mut cx| { use itertools::Itertools; - let default_name: SharedString = MentionUri::PastedImage.name().into(); + let default_name: SharedString = "Image".into(); let (mut images, paths): (Vec<(gpui::Image, SharedString)>, Vec<_>) = clipboard .into_entries() .filter_map(|entry| match entry { @@ -909,7 +911,6 @@ pub(crate) fn paste_images_as_context( } pub(crate) fn insert_crease_for_mention( - excerpt_id: ExcerptId, anchor: text::Anchor, content_len: usize, crease_label: SharedString, @@ -927,7 +928,7 @@ pub(crate) fn insert_crease_for_mention( let crease_id = editor.update(cx, |editor, cx| { let snapshot = editor.buffer().read(cx).snapshot(cx); - let start = snapshot.anchor_in_excerpt(excerpt_id, anchor)?; + let start = snapshot.anchor_in_excerpt(anchor)?; let start = start.bias_right(&snapshot); let end = snapshot.anchor_before(start.to_offset(&snapshot) + content_len); diff --git a/crates/agent_ui/src/message_editor.rs b/crates/agent_ui/src/message_editor.rs index df36f38899c9abea165d0ff5a01834a2bb84c82f..0f59441ab27b5074a710c46a683e72d003a8d5d7 100644 --- a/crates/agent_ui/src/message_editor.rs +++ b/crates/agent_ui/src/message_editor.rs @@ -203,12 +203,10 @@ fn insert_mention_for_project_path( MentionInsertPosition::AtCursor => editor.update(cx, |editor, cx| { let buffer = editor.buffer().read(cx); let snapshot = buffer.snapshot(cx); - let (_, _, buffer_snapshot) = snapshot.as_singleton()?; - let text_anchor = editor - .selections - .newest_anchor() - .start - .text_anchor + let buffer_snapshot = snapshot.as_singleton()?; + let text_anchor = snapshot + 
.anchor_to_buffer_anchor(editor.selections.newest_anchor().start)? + .0 .bias_left(&buffer_snapshot); editor.insert(&mention_text, window, cx); @@ -224,7 +222,7 @@ fn insert_mention_for_project_path( editor.update(cx, |editor, cx| { editor.edit( [( - multi_buffer::Anchor::max()..multi_buffer::Anchor::max(), + multi_buffer::Anchor::Max..multi_buffer::Anchor::Max, new_text, )], cx, @@ -263,7 +261,7 @@ async fn resolve_pasted_context_items( ) -> (Vec, Vec>) { let mut items = Vec::new(); let mut added_worktrees = Vec::new(); - let default_image_name: SharedString = MentionUri::PastedImage.name().into(); + let default_image_name: SharedString = "Image".into(); for entry in entries { match entry { @@ -603,7 +601,7 @@ impl MessageEditor { COMMAND_HINT_INLAY_ID, hint_pos, &InlayHint { - position: hint_pos.text_anchor, + position: snapshot.anchor_to_buffer_anchor(hint_pos)?.0, label: InlayHintLabel::String(hint), kind: Some(InlayHintKind::Parameter), padding_left: false, @@ -640,12 +638,11 @@ impl MessageEditor { let start = self.editor.update(cx, |editor, cx| { editor.set_text(content, window, cx); - editor - .buffer() - .read(cx) - .snapshot(cx) - .anchor_before(Point::zero()) - .text_anchor + let snapshot = editor.buffer().read(cx).snapshot(cx); + snapshot + .anchor_to_buffer_anchor(snapshot.anchor_before(Point::zero())) + .unwrap() + .0 }); let supports_images = self.session_capabilities.read().supports_images(); @@ -815,7 +812,9 @@ impl MessageEditor { ) .uri(match uri { MentionUri::File { .. } => Some(uri.to_uri().to_string()), - MentionUri::PastedImage => None, + MentionUri::PastedImage { .. 
} => { + Some(uri.to_uri().to_string()) + } other => { debug_panic!( "unexpected mention uri for image: {:?}", @@ -999,13 +998,10 @@ impl MessageEditor { if should_insert_creases && let Some(selections) = editor_clipboard_selections { cx.stop_propagation(); - let insertion_target = self - .editor - .read(cx) - .selections - .newest_anchor() - .start - .text_anchor; + let snapshot = self.editor.read(cx).buffer().read(cx).snapshot(cx); + let (insertion_target, _) = snapshot + .anchor_to_buffer_anchor(self.editor.read(cx).selections.newest_anchor().start) + .unwrap(); let project = workspace.read(cx).project().clone(); for selection in selections { @@ -1021,21 +1017,19 @@ impl MessageEditor { }; let mention_text = mention_uri.as_link().to_string(); - let (excerpt_id, text_anchor, content_len) = - self.editor.update(cx, |editor, cx| { - let buffer = editor.buffer().read(cx); - let snapshot = buffer.snapshot(cx); - let (excerpt_id, _, buffer_snapshot) = snapshot.as_singleton().unwrap(); - let text_anchor = insertion_target.bias_left(&buffer_snapshot); + let (text_anchor, content_len) = self.editor.update(cx, |editor, cx| { + let buffer = editor.buffer().read(cx); + let snapshot = buffer.snapshot(cx); + let buffer_snapshot = snapshot.as_singleton().unwrap(); + let text_anchor = insertion_target.bias_left(&buffer_snapshot); - editor.insert(&mention_text, window, cx); - editor.insert(" ", window, cx); + editor.insert(&mention_text, window, cx); + editor.insert(" ", window, cx); - (excerpt_id, text_anchor, mention_text.len()) - }); + (text_anchor, mention_text.len()) + }); let Some((crease_id, tx)) = insert_crease_for_mention( - excerpt_id, text_anchor, content_len, crease_text.into(), @@ -1145,8 +1139,7 @@ impl MessageEditor { for (anchor, content_len, mention_uri) in all_mentions { let Some((crease_id, tx)) = insert_crease_for_mention( - anchor.excerpt_id, - anchor.text_anchor, + snapshot.anchor_to_buffer_anchor(anchor).unwrap().0, content_len, mention_uri.name().into(), 
mention_uri.icon_path(cx), @@ -1339,25 +1332,23 @@ impl MessageEditor { }; let mention_text = mention_uri.as_link().to_string(); - let (excerpt_id, text_anchor, content_len) = editor.update(cx, |editor, cx| { + let (text_anchor, content_len) = editor.update(cx, |editor, cx| { let buffer = editor.buffer().read(cx); let snapshot = buffer.snapshot(cx); - let (excerpt_id, _, buffer_snapshot) = snapshot.as_singleton().unwrap(); - let text_anchor = editor - .selections - .newest_anchor() - .start - .text_anchor + let buffer_snapshot = snapshot.as_singleton().unwrap(); + let text_anchor = snapshot + .anchor_to_buffer_anchor(editor.selections.newest_anchor().start) + .unwrap() + .0 .bias_left(&buffer_snapshot); editor.insert(&mention_text, window, cx); editor.insert(" ", window, cx); - (excerpt_id, text_anchor, mention_text.len()) + (text_anchor, mention_text.len()) }); let Some((crease_id, tx)) = insert_crease_for_mention( - excerpt_id, text_anchor, content_len, mention_uri.name().into(), @@ -1649,7 +1640,9 @@ impl MessageEditor { let mention_uri = if let Some(uri) = uri { MentionUri::parse(&uri, path_style) } else { - Ok(MentionUri::PastedImage) + Ok(MentionUri::PastedImage { + name: "Image".to_string(), + }) }; let Some(mention_uri) = mention_uri.log_err() else { continue; @@ -1700,8 +1693,7 @@ impl MessageEditor { let adjusted_start = insertion_start + range.start; let anchor = snapshot.anchor_before(MultiBufferOffset(adjusted_start)); let Some((crease_id, tx)) = insert_crease_for_mention( - anchor.excerpt_id, - anchor.text_anchor, + snapshot.anchor_to_buffer_anchor(anchor).unwrap().0, range.end - range.start, mention_uri.name().into(), mention_uri.icon_path(cx), @@ -2077,23 +2069,13 @@ mod tests { cx.run_until_parked(); - let excerpt_id = editor.update(cx, |editor, cx| { - editor - .buffer() - .read(cx) - .excerpt_ids() - .into_iter() - .next() - .unwrap() - }); let completions = editor.update_in(cx, |editor, window, cx| { editor.set_text("Hello @file ", window, cx); 
let buffer = editor.buffer().read(cx).as_singleton().unwrap(); let completion_provider = editor.completion_provider().unwrap(); completion_provider.completions( - excerpt_id, &buffer, - text::Anchor::MAX, + text::Anchor::max_for_buffer(buffer.read(cx).remote_id()), CompletionContext { trigger_kind: CompletionTriggerKind::TRIGGER_CHARACTER, trigger_character: Some("@".into()), @@ -2114,7 +2096,7 @@ mod tests { editor.update_in(cx, |editor, window, cx| { let snapshot = editor.buffer().read(cx).snapshot(cx); let range = snapshot - .anchor_range_in_excerpt(excerpt_id, completion.replace_range) + .buffer_anchor_range_to_anchor_range(completion.replace_range) .unwrap(); editor.edit([(range, completion.new_text)], cx); (completion.confirm.unwrap())(CompletionIntent::Complete, window, cx); @@ -4096,6 +4078,11 @@ mod tests { &mut cx, ); + let image_name = temporary_image_path + .file_name() + .and_then(|n| n.to_str()) + .unwrap_or("Image") + .to_string(); std::fs::remove_file(&temporary_image_path).expect("remove temp png"); let expected_file_uri = MentionUri::File { @@ -4103,12 +4090,16 @@ mod tests { } .to_uri() .to_string(); - let expected_image_uri = MentionUri::PastedImage.to_uri().to_string(); + let expected_image_uri = MentionUri::PastedImage { + name: image_name.clone(), + } + .to_uri() + .to_string(); editor.update(&mut cx, |editor, cx| { assert_eq!( editor.text(cx), - format!("[@Image]({expected_image_uri}) [@file.txt]({expected_file_uri}) ") + format!("[@{image_name}]({expected_image_uri}) [@file.txt]({expected_file_uri}) ") ); }); @@ -4116,7 +4107,7 @@ mod tests { assert_eq!(contents.len(), 2); assert!(contents.iter().any(|(uri, mention)| { - *uri == MentionUri::PastedImage && matches!(mention, Mention::Image(_)) + matches!(uri, MentionUri::PastedImage { .. 
}) && matches!(mention, Mention::Image(_)) })); assert!(contents.iter().any(|(uri, mention)| { *uri == MentionUri::File { diff --git a/crates/agent_ui/src/profile_selector.rs b/crates/agent_ui/src/profile_selector.rs index 1bad3c45e4dece2397a2e026d659fd0fad043a24..963e32af55fda90f49edb0787f7327190c92681f 100644 --- a/crates/agent_ui/src/profile_selector.rs +++ b/crates/agent_ui/src/profile_selector.rs @@ -90,6 +90,7 @@ impl ProfileSelector { if let Some((next_profile_id, _)) = profiles.get_index(next_index) { self.provider.set_profile(next_profile_id.clone(), cx); + cx.notify(); } } diff --git a/crates/agent_ui/src/thread_import.rs b/crates/agent_ui/src/thread_import.rs index f5fc89d3df4991ff5186e2af6d73ad6a840c09a1..5402b1c74353b73a522a068aa32dfd0a9dc85c60 100644 --- a/crates/agent_ui/src/thread_import.rs +++ b/crates/agent_ui/src/thread_import.rs @@ -17,7 +17,7 @@ use ui::{ prelude::*, }; use util::ResultExt; -use workspace::{ModalView, MultiWorkspace, Workspace}; +use workspace::{ModalView, MultiWorkspace, PathList, Workspace}; use crate::{ Agent, AgentPanel, @@ -500,6 +500,7 @@ fn collect_importable_threads( updated_at: session.updated_at.unwrap_or_else(|| Utc::now()), created_at: session.created_at, folder_paths, + main_worktree_paths: PathList::default(), archived: true, }); } diff --git a/crates/agent_ui/src/thread_metadata_store.rs b/crates/agent_ui/src/thread_metadata_store.rs index 4c66d57bcfafe98432319a173e7736a581f1d986..fcd9665c52451d62fe8185abca919148a1666126 100644 --- a/crates/agent_ui/src/thread_metadata_store.rs +++ b/crates/agent_ui/src/thread_metadata_store.rs @@ -66,6 +66,7 @@ fn migrate_thread_metadata(cx: &mut App) { updated_at: entry.updated_at, created_at: entry.created_at, folder_paths: entry.folder_paths, + main_worktree_paths: PathList::default(), archived: true, }) }) @@ -126,6 +127,7 @@ pub struct ThreadMetadata { pub updated_at: DateTime, pub created_at: Option>, pub folder_paths: PathList, + pub main_worktree_paths: PathList, pub 
archived: bool, } @@ -149,6 +151,7 @@ pub struct ThreadMetadataStore { db: ThreadMetadataDb, threads: HashMap, threads_by_paths: HashMap>, + threads_by_main_paths: HashMap>, reload_task: Option>>, session_subscriptions: HashMap, pending_thread_ops_tx: smol::channel::Sender, @@ -238,6 +241,21 @@ impl ThreadMetadataStore { .filter(|s| !s.archived) } + /// Returns threads whose `main_worktree_paths` matches the given path list, + /// excluding archived threads. This finds threads that were opened in a + /// linked worktree but are associated with the given main worktree. + pub fn entries_for_main_worktree_path( + &self, + path_list: &PathList, + ) -> impl Iterator + '_ { + self.threads_by_main_paths + .get(path_list) + .into_iter() + .flatten() + .filter_map(|s| self.threads.get(s)) + .filter(|s| !s.archived) + } + fn reload(&mut self, cx: &mut Context) -> Shared> { let db = self.db.clone(); self.reload_task.take(); @@ -254,12 +272,19 @@ impl ThreadMetadataStore { this.update(cx, |this, cx| { this.threads.clear(); this.threads_by_paths.clear(); + this.threads_by_main_paths.clear(); for row in rows { this.threads_by_paths .entry(row.folder_paths.clone()) .or_default() .insert(row.session_id.clone()); + if !row.main_worktree_paths.is_empty() { + this.threads_by_main_paths + .entry(row.main_worktree_paths.clone()) + .or_default() + .insert(row.session_id.clone()); + } this.threads.insert(row.session_id.clone(), row); } @@ -298,12 +323,22 @@ impl ThreadMetadataStore { } fn save_internal(&mut self, metadata: ThreadMetadata) { - // If the folder paths have changed, we need to clear the old entry - if let Some(thread) = self.threads.get(&metadata.session_id) - && thread.folder_paths != metadata.folder_paths - && let Some(session_ids) = self.threads_by_paths.get_mut(&thread.folder_paths) - { - session_ids.remove(&metadata.session_id); + if let Some(thread) = self.threads.get(&metadata.session_id) { + if thread.folder_paths != metadata.folder_paths { + if let Some(session_ids) 
= self.threads_by_paths.get_mut(&thread.folder_paths) { + session_ids.remove(&metadata.session_id); + } + } + if thread.main_worktree_paths != metadata.main_worktree_paths + && !thread.main_worktree_paths.is_empty() + { + if let Some(session_ids) = self + .threads_by_main_paths + .get_mut(&thread.main_worktree_paths) + { + session_ids.remove(&metadata.session_id); + } + } } self.threads @@ -314,6 +349,13 @@ impl ThreadMetadataStore { .or_default() .insert(metadata.session_id.clone()); + if !metadata.main_worktree_paths.is_empty() { + self.threads_by_main_paths + .entry(metadata.main_worktree_paths.clone()) + .or_default() + .insert(metadata.session_id.clone()); + } + self.pending_thread_ops_tx .try_send(DbOperation::Upsert(metadata)) .log_err(); @@ -370,10 +412,18 @@ impl ThreadMetadataStore { return; } - if let Some(thread) = self.threads.get(&session_id) - && let Some(session_ids) = self.threads_by_paths.get_mut(&thread.folder_paths) - { - session_ids.remove(&session_id); + if let Some(thread) = self.threads.get(&session_id) { + if let Some(session_ids) = self.threads_by_paths.get_mut(&thread.folder_paths) { + session_ids.remove(&session_id); + } + if !thread.main_worktree_paths.is_empty() { + if let Some(session_ids) = self + .threads_by_main_paths + .get_mut(&thread.main_worktree_paths) + { + session_ids.remove(&session_id); + } + } } self.threads.remove(&session_id); self.pending_thread_ops_tx @@ -397,14 +447,9 @@ impl ThreadMetadataStore { let weak_store = weak_store.clone(); move |thread, cx| { weak_store - .update(cx, |store, cx| { + .update(cx, |store, _cx| { let session_id = thread.session_id().clone(); store.session_subscriptions.remove(&session_id); - if thread.entries().is_empty() { - // Empty threads can be unloaded without ever being - // durably persisted by the underlying agent. 
- store.delete(session_id, cx); - } }) .ok(); } @@ -449,6 +494,7 @@ impl ThreadMetadataStore { db, threads: HashMap::default(), threads_by_paths: HashMap::default(), + threads_by_main_paths: HashMap::default(), reload_task: None, session_subscriptions: HashMap::default(), pending_thread_ops_tx: tx, @@ -494,6 +540,10 @@ impl ThreadMetadataStore { | AcpThreadEvent::Refusal | AcpThreadEvent::WorkingDirectoriesUpdated => { let thread_ref = thread.read(cx); + if thread_ref.entries().is_empty() { + return; + } + let existing_thread = self.threads.get(thread_ref.session_id()); let session_id = thread_ref.session_id().clone(); let title = thread_ref @@ -517,6 +567,20 @@ impl ThreadMetadataStore { PathList::new(&paths) }; + let main_worktree_paths = { + let project = thread_ref.project().read(cx); + let mut main_paths: Vec> = Vec::new(); + for repo in project.repositories(cx).values() { + let snapshot = repo.read(cx).snapshot(); + if snapshot.is_linked_worktree() { + main_paths.push(snapshot.original_repo_abs_path.clone()); + } + } + main_paths.sort(); + main_paths.dedup(); + PathList::new(&main_paths) + }; + // Threads without a folder path (e.g. started in an empty // window) are archived by default so they don't get lost, // because they won't show up in the sidebar. Users can reload @@ -532,6 +596,7 @@ impl ThreadMetadataStore { created_at: Some(created_at), updated_at, folder_paths, + main_worktree_paths, archived, }; @@ -567,6 +632,8 @@ impl Domain for ThreadMetadataDb { ) STRICT; ), sql!(ALTER TABLE sidebar_threads ADD COLUMN archived INTEGER DEFAULT 0), + sql!(ALTER TABLE sidebar_threads ADD COLUMN main_worktree_paths TEXT), + sql!(ALTER TABLE sidebar_threads ADD COLUMN main_worktree_paths_order TEXT), ]; } @@ -583,7 +650,7 @@ impl ThreadMetadataDb { /// List all sidebar thread metadata, ordered by updated_at descending. 
pub fn list(&self) -> anyhow::Result> { self.select::( - "SELECT session_id, agent_id, title, updated_at, created_at, folder_paths, folder_paths_order, archived \ + "SELECT session_id, agent_id, title, updated_at, created_at, folder_paths, folder_paths_order, archived, main_worktree_paths, main_worktree_paths_order \ FROM sidebar_threads \ ORDER BY updated_at DESC" )?() @@ -606,11 +673,18 @@ impl ThreadMetadataDb { } else { (Some(serialized.paths), Some(serialized.order)) }; + let main_serialized = row.main_worktree_paths.serialize(); + let (main_worktree_paths, main_worktree_paths_order) = if row.main_worktree_paths.is_empty() + { + (None, None) + } else { + (Some(main_serialized.paths), Some(main_serialized.order)) + }; let archived = row.archived; self.write(move |conn| { - let sql = "INSERT INTO sidebar_threads(session_id, agent_id, title, updated_at, created_at, folder_paths, folder_paths_order, archived) \ - VALUES (?1, ?2, ?3, ?4, ?5, ?6, ?7, ?8) \ + let sql = "INSERT INTO sidebar_threads(session_id, agent_id, title, updated_at, created_at, folder_paths, folder_paths_order, archived, main_worktree_paths, main_worktree_paths_order) \ + VALUES (?1, ?2, ?3, ?4, ?5, ?6, ?7, ?8, ?9, ?10) \ ON CONFLICT(session_id) DO UPDATE SET \ agent_id = excluded.agent_id, \ title = excluded.title, \ @@ -618,7 +692,9 @@ impl ThreadMetadataDb { created_at = excluded.created_at, \ folder_paths = excluded.folder_paths, \ folder_paths_order = excluded.folder_paths_order, \ - archived = excluded.archived"; + archived = excluded.archived, \ + main_worktree_paths = excluded.main_worktree_paths, \ + main_worktree_paths_order = excluded.main_worktree_paths_order"; let mut stmt = Statement::prepare(conn, sql)?; let mut i = stmt.bind(&id, 1)?; i = stmt.bind(&agent_id, i)?; @@ -627,7 +703,9 @@ impl ThreadMetadataDb { i = stmt.bind(&created_at, i)?; i = stmt.bind(&folder_paths, i)?; i = stmt.bind(&folder_paths_order, i)?; - stmt.bind(&archived, i)?; + i = stmt.bind(&archived, i)?; + i = 
stmt.bind(&main_worktree_paths, i)?; + stmt.bind(&main_worktree_paths_order, i)?; stmt.exec() }) .await @@ -657,6 +735,10 @@ impl Column for ThreadMetadata { let (folder_paths_order_str, next): (Option, i32) = Column::column(statement, next)?; let (archived, next): (bool, i32) = Column::column(statement, next)?; + let (main_worktree_paths_str, next): (Option, i32) = + Column::column(statement, next)?; + let (main_worktree_paths_order_str, next): (Option, i32) = + Column::column(statement, next)?; let agent_id = agent_id .map(|id| AgentId::new(id)) @@ -678,6 +760,15 @@ impl Column for ThreadMetadata { }) .unwrap_or_default(); + let main_worktree_paths = main_worktree_paths_str + .map(|paths| { + PathList::deserialize(&util::path_list::SerializedPathList { + paths, + order: main_worktree_paths_order_str.unwrap_or_default(), + }) + }) + .unwrap_or_default(); + Ok(( ThreadMetadata { session_id: acp::SessionId::new(id), @@ -686,6 +777,7 @@ impl Column for ThreadMetadata { updated_at, created_at, folder_paths, + main_worktree_paths, archived, }, next, @@ -742,6 +834,7 @@ mod tests { updated_at, created_at: Some(updated_at), folder_paths, + main_worktree_paths: PathList::default(), } } @@ -957,6 +1050,7 @@ mod tests { updated_at: now - chrono::Duration::seconds(10), created_at: Some(now - chrono::Duration::seconds(10)), folder_paths: project_a_paths.clone(), + main_worktree_paths: PathList::default(), archived: false, }; @@ -1066,6 +1160,7 @@ mod tests { updated_at: existing_updated_at, created_at: Some(existing_updated_at), folder_paths: project_paths.clone(), + main_worktree_paths: PathList::default(), archived: false, }; @@ -1197,7 +1292,7 @@ mod tests { } #[gpui::test] - async fn test_empty_thread_metadata_deleted_when_thread_released(cx: &mut TestAppContext) { + async fn test_empty_thread_events_do_not_create_metadata(cx: &mut TestAppContext) { init_test(cx); let fs = FakeFs::new(cx.executor()); @@ -1227,11 +1322,16 @@ mod tests { .entry_ids() .collect::>() }); - 
assert_eq!(metadata_ids, vec![session_id]); + assert!( + metadata_ids.is_empty(), + "expected empty draft thread title updates to be ignored" + ); - drop(thread); - cx.update(|_| {}); - cx.run_until_parked(); + cx.update(|cx| { + thread.update(cx, |thread, cx| { + thread.push_user_content_block(None, "Hello".into(), cx); + }); + }); cx.run_until_parked(); let metadata_ids = cx.update(|cx| { @@ -1240,10 +1340,7 @@ mod tests { .entry_ids() .collect::>() }); - assert!( - metadata_ids.is_empty(), - "expected empty draft thread metadata to be deleted on release" - ); + assert_eq!(metadata_ids, vec![session_id]); } #[gpui::test] @@ -1318,6 +1415,7 @@ mod tests { cx.update(|cx| { thread_without_worktree.update(cx, |thread, cx| { + thread.push_user_content_block(None, "content".into(), cx); thread.set_title("No Project Thread".into(), cx).detach(); }); }); @@ -1338,6 +1436,7 @@ mod tests { cx.update(|cx| { thread_with_worktree.update(cx, |thread, cx| { + thread.push_user_content_block(None, "content".into(), cx); thread.set_title("Project Thread".into(), cx).detach(); }); }); @@ -1393,6 +1492,7 @@ mod tests { // Set a title on the regular thread to trigger a save via handle_thread_update. 
cx.update(|cx| { regular_thread.update(cx, |thread, cx| { + thread.push_user_content_block(None, "content".into(), cx); thread.set_title("Regular Thread".into(), cx).detach(); }); }); diff --git a/crates/agent_ui/src/threads_archive_view.rs b/crates/agent_ui/src/threads_archive_view.rs index 9aca31e1edbe729fccecfc0dd8f0530d2aed2564..13b2aa1a37cd506c338d13db78bce751882e426a 100644 --- a/crates/agent_ui/src/threads_archive_view.rs +++ b/crates/agent_ui/src/threads_archive_view.rs @@ -91,14 +91,16 @@ impl TimeBucket { } fn fuzzy_match_positions(query: &str, text: &str) -> Option> { - let query = query.to_lowercase(); - let text_lower = text.to_lowercase(); let mut positions = Vec::new(); let mut query_chars = query.chars().peekable(); - for (i, c) in text_lower.chars().enumerate() { - if query_chars.peek() == Some(&c) { - positions.push(i); - query_chars.next(); + for (byte_idx, candidate_char) in text.char_indices() { + if let Some(&query_char) = query_chars.peek() { + if candidate_char.eq_ignore_ascii_case(&query_char) { + positions.push(byte_idx); + query_chars.next(); + } + } else { + break; } } if query_chars.peek().is_none() { @@ -216,6 +218,13 @@ impl ThreadsArchiveView { handle.focus(window, cx); } + pub fn is_filter_editor_focused(&self, window: &Window, cx: &App) -> bool { + self.filter_editor + .read(cx) + .focus_handle(cx) + .is_focused(window) + } + fn update_items(&mut self, cx: &mut Context) { let sessions = ThreadMetadataStore::global(cx) .read(cx) @@ -344,7 +353,6 @@ impl ThreadsArchiveView { .map(|mw| { mw.read(cx) .workspaces() - .iter() .filter_map(|ws| ws.read(cx).database_id()) .collect() }) @@ -1283,3 +1291,59 @@ impl PickerDelegate for ProjectPickerDelegate { ) } } + +#[cfg(test)] +mod tests { + use super::*; + + #[test] + fn test_fuzzy_match_positions_returns_byte_indices() { + // "🔥abc" — the fire emoji is 4 bytes, so 'a' starts at byte 4, 'b' at 5, 'c' at 6. 
+ let text = "🔥abc"; + let positions = fuzzy_match_positions("ab", text).expect("should match"); + assert_eq!(positions, vec![4, 5]); + + // Verify positions are valid char boundaries (this is the assertion that + // panicked before the fix). + for &pos in &positions { + assert!( + text.is_char_boundary(pos), + "position {pos} is not a valid UTF-8 boundary in {text:?}" + ); + } + } + + #[test] + fn test_fuzzy_match_positions_ascii_still_works() { + let positions = fuzzy_match_positions("he", "hello").expect("should match"); + assert_eq!(positions, vec![0, 1]); + } + + #[test] + fn test_fuzzy_match_positions_case_insensitive() { + let positions = fuzzy_match_positions("HE", "hello").expect("should match"); + assert_eq!(positions, vec![0, 1]); + } + + #[test] + fn test_fuzzy_match_positions_no_match() { + assert!(fuzzy_match_positions("xyz", "hello").is_none()); + } + + #[test] + fn test_fuzzy_match_positions_multi_byte_interior() { + // "café" — 'é' is 2 bytes (0xC3 0xA9), so 'f' starts at byte 4, 'é' at byte 5. + let text = "café"; + let positions = fuzzy_match_positions("fé", text).expect("should match"); + // 'c'=0, 'a'=1, 'f'=2, 'é'=3..4 — wait, let's verify: + // Actually: c=1 byte, a=1 byte, f=1 byte, é=2 bytes + // So byte positions: c=0, a=1, f=2, é=3 + assert_eq!(positions, vec![2, 3]); + for &pos in &positions { + assert!( + text.is_char_boundary(pos), + "position {pos} is not a valid UTF-8 boundary in {text:?}" + ); + } + } +} diff --git a/crates/agent_ui/src/ui/mention_crease.rs b/crates/agent_ui/src/ui/mention_crease.rs index 6e99647304d93fe91cd6b91dbd2bf3bfd82c7ab0..bd48a558f5d9b1f042f974dc6e174f8ba8078adf 100644 --- a/crates/agent_ui/src/ui/mention_crease.rs +++ b/crates/agent_ui/src/ui/mention_crease.rs @@ -184,7 +184,7 @@ fn open_mention_uri( MentionUri::Fetch { url } => { cx.open_url(url.as_str()); } - MentionUri::PastedImage + MentionUri::PastedImage { .. } | MentionUri::Selection { abs_path: None, .. } | MentionUri::Diagnostics { .. 
} | MentionUri::TerminalSelection { .. } diff --git a/crates/buffer_diff/src/buffer_diff.rs b/crates/buffer_diff/src/buffer_diff.rs index 1cb1e801c2cd68d442321da76c0abb848f9fa0d8..c168bd2956e0687eca5e5adeb16edbe70e9edd54 100644 --- a/crates/buffer_diff/src/buffer_diff.rs +++ b/crates/buffer_diff/src/buffer_diff.rs @@ -171,9 +171,9 @@ impl sum_tree::Item for PendingHunk { impl sum_tree::Summary for DiffHunkSummary { type Context<'a> = &'a text::BufferSnapshot; - fn zero(_cx: Self::Context<'_>) -> Self { + fn zero(buffer: &text::BufferSnapshot) -> Self { DiffHunkSummary { - buffer_range: Anchor::MIN..Anchor::MIN, + buffer_range: Anchor::min_min_range_for_buffer(buffer.remote_id()), diff_base_byte_range: 0..0, added_rows: 0, removed_rows: 0, @@ -248,6 +248,10 @@ impl BufferDiffSnapshot { buffer_diff.update(cx, |buffer_diff, cx| buffer_diff.snapshot(cx)) } + pub fn buffer_id(&self) -> BufferId { + self.inner.buffer_snapshot.remote_id() + } + pub fn is_empty(&self) -> bool { self.inner.hunks.is_empty() } @@ -953,7 +957,7 @@ impl BufferDiffInner { .flat_map(move |hunk| { [ ( - &hunk.buffer_range.start, + hunk.buffer_range.start, ( hunk.buffer_range.start, hunk.diff_base_byte_range.start, @@ -961,7 +965,7 @@ impl BufferDiffInner { ), ), ( - &hunk.buffer_range.end, + hunk.buffer_range.end, (hunk.buffer_range.end, hunk.diff_base_byte_range.end, hunk), ), ] @@ -1653,7 +1657,7 @@ impl BufferDiff { ) { let hunks = self .snapshot(cx) - .hunks_intersecting_range(Anchor::MIN..Anchor::MAX, buffer) + .hunks_intersecting_range(Anchor::min_max_range_for_buffer(buffer.remote_id()), buffer) .collect::>(); let Some(secondary) = self.secondary_diff.clone() else { return; diff --git a/crates/call/src/call_impl/room.rs b/crates/call/src/call_impl/room.rs index f92a8163d54de0c21c7318c4baab5aad5ce49b75..37a3fd823ec03d3b1d94419ac47662431d718708 100644 --- a/crates/call/src/call_impl/room.rs +++ b/crates/call/src/call_impl/room.rs @@ -21,7 +21,7 @@ use language::LanguageRegistry; use 
livekit::{LocalTrackPublication, ParticipantIdentity, RoomEvent}; use livekit_client::{self as livekit, AudioStream, TrackSid}; use postage::{sink::Sink, stream::Stream, watch}; -use project::Project; +use project::{CURRENT_PROJECT_FEATURES, Project}; use settings::Settings as _; use std::sync::atomic::AtomicU64; use std::{future::Future, mem, rc::Rc, sync::Arc, time::Duration, time::Instant}; @@ -1237,6 +1237,10 @@ impl Room { worktrees: project.read(cx).worktree_metadata_protos(cx), is_ssh_project: project.read(cx).is_via_remote_server(), windows_paths: Some(project.read(cx).path_style(cx) == PathStyle::Windows), + features: CURRENT_PROJECT_FEATURES + .iter() + .map(|s| s.to_string()) + .collect(), }); cx.spawn(async move |this, cx| { diff --git a/crates/cli/src/cli.rs b/crates/cli/src/cli.rs index 1a3ce059b8116ac7438f3eb0330b47660cc863de..d8da78c53210230597dab49ce297d9fa694e62f1 100644 --- a/crates/cli/src/cli.rs +++ b/crates/cli/src/cli.rs @@ -21,6 +21,7 @@ pub enum CliRequest { reuse: bool, env: Option>, user_data_dir: Option, + dev_container: bool, }, } diff --git a/crates/cli/src/main.rs b/crates/cli/src/main.rs index b8af5896285d3080ca3320a5909b3f58f72de643..41f2d14c1908ac18e7ea297eef19d8d9bd1cf8b5 100644 --- a/crates/cli/src/main.rs +++ b/crates/cli/src/main.rs @@ -118,6 +118,12 @@ struct Args { /// Will attempt to give the correct command to run #[arg(long)] system_specs: bool, + /// Open the project in a dev container. + /// + /// Automatically triggers "Reopen in Dev Container" if a `.devcontainer/` + /// configuration is found in the project directory. + #[arg(long)] + dev_container: bool, /// Pairs of file paths to diff. Can be specified multiple times. /// When directories are provided, recurses into them and shows all changed files in a single multi-diff view. 
#[arg(long, action = clap::ArgAction::Append, num_args = 2, value_names = ["OLD_PATH", "NEW_PATH"])] @@ -670,6 +676,7 @@ fn main() -> Result<()> { reuse: args.reuse, env, user_data_dir: user_data_dir_for_thread, + dev_container: args.dev_container, })?; while let Ok(response) = rx.recv() { diff --git a/crates/client/Cargo.toml b/crates/client/Cargo.toml index 1edbb3399e4332e2ebd23f812c66697bda72d587..7bbaccb22e0e6c7508240186103e216f83be2f0c 100644 --- a/crates/client/Cargo.toml +++ b/crates/client/Cargo.toml @@ -22,6 +22,7 @@ base64.workspace = true chrono = { workspace = true, features = ["serde"] } clock.workspace = true cloud_api_client.workspace = true +cloud_api_types.workspace = true cloud_llm_client.workspace = true collections.workspace = true credentials_provider.workspace = true @@ -35,6 +36,7 @@ gpui_tokio.workspace = true http_client.workspace = true http_client_tls.workspace = true httparse = "1.10" +language_model.workspace = true log.workspace = true parking_lot.workspace = true paths.workspace = true @@ -60,6 +62,7 @@ tokio.workspace = true url.workspace = true util.workspace = true worktree.workspace = true +zed_credentials_provider.workspace = true [dev-dependencies] clock = { workspace = true, features = ["test-support"] } diff --git a/crates/client/src/client.rs b/crates/client/src/client.rs index f40d90a983978e8928477b5a2973dfa05e05b907..dfd9963a0ee52d167f8d4edb0b850f4debed7fd4 100644 --- a/crates/client/src/client.rs +++ b/crates/client/src/client.rs @@ -1,6 +1,7 @@ #[cfg(any(test, feature = "test-support"))] pub mod test; +mod llm_token; mod proxy; pub mod telemetry; pub mod user; @@ -13,8 +14,9 @@ use async_tungstenite::tungstenite::{ http::{HeaderValue, Request, StatusCode}, }; use clock::SystemClock; -use cloud_api_client::CloudApiClient; use cloud_api_client::websocket_protocol::MessageToClient; +use cloud_api_client::{ClientApiError, CloudApiClient}; +use cloud_api_types::OrganizationId; use credentials_provider::CredentialsProvider; use 
feature_flags::FeatureFlagAppExt as _; use futures::{ @@ -24,6 +26,7 @@ use futures::{ }; use gpui::{App, AsyncApp, Entity, Global, Task, WeakEntity, actions}; use http_client::{HttpClient, HttpClientWithUrl, http, read_proxy_from_env}; +use language_model::LlmApiToken; use parking_lot::{Mutex, RwLock}; use postage::watch; use proxy::connect_proxy_stream; @@ -51,6 +54,7 @@ use tokio::net::TcpStream; use url::Url; use util::{ConnectionResult, ResultExt}; +pub use llm_token::*; pub use rpc::*; pub use telemetry_events::Event; pub use user::*; @@ -339,7 +343,7 @@ pub struct ClientCredentialsProvider { impl ClientCredentialsProvider { pub fn new(cx: &App) -> Self { Self { - provider: ::global(cx), + provider: zed_credentials_provider::global(cx), } } @@ -568,6 +572,10 @@ impl Client { self.http.clone() } + pub fn credentials_provider(&self) -> Arc { + self.credentials_provider.provider.clone() + } + pub fn cloud_client(&self) -> Arc { self.cloud_client.clone() } @@ -1513,6 +1521,66 @@ impl Client { }) } + pub async fn acquire_llm_token( + &self, + llm_token: &LlmApiToken, + organization_id: Option, + ) -> Result { + let system_id = self.telemetry().system_id().map(|x| x.to_string()); + let cloud_client = self.cloud_client(); + match llm_token + .acquire(&cloud_client, system_id, organization_id) + .await + { + Ok(token) => Ok(token), + Err(ClientApiError::Unauthorized) => { + self.request_sign_out(); + Err(ClientApiError::Unauthorized).context("Failed to create LLM token") + } + Err(err) => Err(anyhow::Error::from(err)), + } + } + + pub async fn refresh_llm_token( + &self, + llm_token: &LlmApiToken, + organization_id: Option, + ) -> Result { + let system_id = self.telemetry().system_id().map(|x| x.to_string()); + let cloud_client = self.cloud_client(); + match llm_token + .refresh(&cloud_client, system_id, organization_id) + .await + { + Ok(token) => Ok(token), + Err(ClientApiError::Unauthorized) => { + self.request_sign_out(); + return 
Err(ClientApiError::Unauthorized).context("Failed to create LLM token"); + } + Err(err) => return Err(anyhow::Error::from(err)), + } + } + + pub async fn clear_and_refresh_llm_token( + &self, + llm_token: &LlmApiToken, + organization_id: Option, + ) -> Result { + let system_id = self.telemetry().system_id().map(|x| x.to_string()); + let cloud_client = self.cloud_client(); + match llm_token + .clear_and_refresh(&cloud_client, system_id, organization_id) + .await + { + Ok(token) => Ok(token), + Err(ClientApiError::Unauthorized) => { + self.request_sign_out(); + return Err(ClientApiError::Unauthorized).context("Failed to create LLM token"); + } + Err(err) => return Err(anyhow::Error::from(err)), + } + } + pub async fn sign_out(self: &Arc, cx: &AsyncApp) { self.state.write().credentials = None; self.cloud_client.clear_credentials(); @@ -2141,11 +2209,13 @@ mod tests { project_id: 1, committer_name: None, committer_email: None, + features: Vec::new(), }); server.send(proto::JoinProject { project_id: 2, committer_name: None, committer_email: None, + features: Vec::new(), }); done_rx1.recv().await.unwrap(); done_rx2.recv().await.unwrap(); diff --git a/crates/client/src/llm_token.rs b/crates/client/src/llm_token.rs new file mode 100644 index 0000000000000000000000000000000000000000..f62aa6dd4dc3462bc3a0f6f46c35f0e4e5499816 --- /dev/null +++ b/crates/client/src/llm_token.rs @@ -0,0 +1,116 @@ +use super::{Client, UserStore}; +use cloud_api_types::websocket_protocol::MessageToClient; +use cloud_llm_client::{EXPIRED_LLM_TOKEN_HEADER_NAME, OUTDATED_LLM_TOKEN_HEADER_NAME}; +use gpui::{ + App, AppContext as _, Context, Entity, EventEmitter, Global, ReadGlobal as _, Subscription, +}; +use language_model::LlmApiToken; +use std::sync::Arc; + +pub trait NeedsLlmTokenRefresh { + /// Returns whether the LLM token needs to be refreshed. 
+ fn needs_llm_token_refresh(&self) -> bool; +} + +impl NeedsLlmTokenRefresh for http_client::Response { + fn needs_llm_token_refresh(&self) -> bool { + self.headers().get(EXPIRED_LLM_TOKEN_HEADER_NAME).is_some() + || self.headers().get(OUTDATED_LLM_TOKEN_HEADER_NAME).is_some() + } +} + +enum TokenRefreshMode { + Refresh, + ClearAndRefresh, +} + +pub fn global_llm_token(cx: &App) -> LlmApiToken { + RefreshLlmTokenListener::global(cx) + .read(cx) + .llm_api_token + .clone() +} + +struct GlobalRefreshLlmTokenListener(Entity); + +impl Global for GlobalRefreshLlmTokenListener {} + +pub struct LlmTokenRefreshedEvent; + +pub struct RefreshLlmTokenListener { + client: Arc, + user_store: Entity, + llm_api_token: LlmApiToken, + _subscription: Subscription, +} + +impl EventEmitter for RefreshLlmTokenListener {} + +impl RefreshLlmTokenListener { + pub fn register(client: Arc, user_store: Entity, cx: &mut App) { + let listener = cx.new(|cx| RefreshLlmTokenListener::new(client, user_store, cx)); + cx.set_global(GlobalRefreshLlmTokenListener(listener)); + } + + pub fn global(cx: &App) -> Entity { + GlobalRefreshLlmTokenListener::global(cx).0.clone() + } + + fn new(client: Arc, user_store: Entity, cx: &mut Context) -> Self { + client.add_message_to_client_handler({ + let this = cx.weak_entity(); + move |message, cx| { + if let Some(this) = this.upgrade() { + Self::handle_refresh_llm_token(this, message, cx); + } + } + }); + + let subscription = cx.subscribe(&user_store, |this, _user_store, event, cx| { + if matches!(event, super::user::Event::OrganizationChanged) { + this.refresh(TokenRefreshMode::ClearAndRefresh, cx); + } + }); + + Self { + client, + user_store, + llm_api_token: LlmApiToken::default(), + _subscription: subscription, + } + } + + fn refresh(&self, mode: TokenRefreshMode, cx: &mut Context) { + let client = self.client.clone(); + let llm_api_token = self.llm_api_token.clone(); + let organization_id = self + .user_store + .read(cx) + .current_organization() + 
.map(|organization| organization.id.clone()); + cx.spawn(async move |this, cx| { + match mode { + TokenRefreshMode::Refresh => { + client + .refresh_llm_token(&llm_api_token, organization_id) + .await?; + } + TokenRefreshMode::ClearAndRefresh => { + client + .clear_and_refresh_llm_token(&llm_api_token, organization_id) + .await?; + } + } + this.update(cx, |_this, cx| cx.emit(LlmTokenRefreshedEvent)) + }) + .detach_and_log_err(cx); + } + + fn handle_refresh_llm_token(this: Entity, message: &MessageToClient, cx: &mut App) { + match message { + MessageToClient::UserUpdated => { + this.update(cx, |this, cx| this.refresh(TokenRefreshMode::Refresh, cx)); + } + } + } +} diff --git a/crates/codestral/Cargo.toml b/crates/codestral/Cargo.toml index 0daaee8fb1420c76757ca898655e8dd1a5244d7e..801221d3128b8aa2d25175e086a741d5d85da626 100644 --- a/crates/codestral/Cargo.toml +++ b/crates/codestral/Cargo.toml @@ -22,6 +22,7 @@ log.workspace = true serde.workspace = true serde_json.workspace = true text.workspace = true +zed_credentials_provider.workspace = true zeta_prompt.workspace = true [dev-dependencies] diff --git a/crates/codestral/src/codestral.rs b/crates/codestral/src/codestral.rs index 3930e2e873a91618bfae456bc188bbd90ffa64b9..7685fa8f5b1eae9e98a621484602e199c2b76f96 100644 --- a/crates/codestral/src/codestral.rs +++ b/crates/codestral/src/codestral.rs @@ -48,9 +48,10 @@ pub fn codestral_api_key(cx: &App) -> Option> { } pub fn load_codestral_api_key(cx: &mut App) -> Task> { + let credentials_provider = zed_credentials_provider::global(cx); let api_url = codestral_api_url(cx); codestral_api_key_state(cx).update(cx, |key_state, cx| { - key_state.load_if_needed(api_url, |s| s, cx) + key_state.load_if_needed(api_url, |s| s, credentials_provider, cx) }) } diff --git a/crates/collab/migrations.sqlite/20221109000000_test_schema.sql b/crates/collab/migrations.sqlite/20221109000000_test_schema.sql index 
75d7dbf194068f78b3d566e54bb0fa18f66a9878..75175372f24a83cfb50e8f87deae93e3f03e1a8a 100644 --- a/crates/collab/migrations.sqlite/20221109000000_test_schema.sql +++ b/crates/collab/migrations.sqlite/20221109000000_test_schema.sql @@ -48,7 +48,8 @@ CREATE TABLE "projects" ( "host_connection_id" INTEGER, "host_connection_server_id" INTEGER REFERENCES servers (id) ON DELETE CASCADE, "unregistered" BOOLEAN NOT NULL DEFAULT FALSE, - "windows_paths" BOOLEAN NOT NULL DEFAULT FALSE + "windows_paths" BOOLEAN NOT NULL DEFAULT FALSE, + "features" TEXT NOT NULL DEFAULT '' ); CREATE INDEX "index_projects_on_host_connection_server_id" ON "projects" ("host_connection_server_id"); @@ -64,6 +65,7 @@ CREATE TABLE "worktrees" ( "scan_id" INTEGER NOT NULL, "is_complete" BOOL NOT NULL DEFAULT FALSE, "completed_scan_id" INTEGER NOT NULL, + "root_repo_common_dir" VARCHAR, PRIMARY KEY (project_id, id) ); diff --git a/crates/collab/migrations/20251208000000_test_schema.sql b/crates/collab/migrations/20251208000000_test_schema.sql index 394deaf2c0d6a80a2ab6ab1b95a333081c816e23..0110dd149b1143a3edcf76a1e0b18fbf1a22287c 100644 --- a/crates/collab/migrations/20251208000000_test_schema.sql +++ b/crates/collab/migrations/20251208000000_test_schema.sql @@ -332,7 +332,8 @@ CREATE TABLE public.projects ( room_id integer, host_connection_id integer, host_connection_server_id integer, - windows_paths boolean DEFAULT false + windows_paths boolean DEFAULT false, + features text NOT NULL DEFAULT '' ); CREATE SEQUENCE public.projects_id_seq @@ -483,7 +484,8 @@ CREATE TABLE public.worktrees ( visible boolean NOT NULL, scan_id bigint NOT NULL, is_complete boolean DEFAULT false NOT NULL, - completed_scan_id bigint + completed_scan_id bigint, + root_repo_common_dir character varying ); ALTER TABLE ONLY public.breakpoints ALTER COLUMN id SET DEFAULT nextval('public.breakpoints_id_seq'::regclass); diff --git a/crates/collab/src/db.rs b/crates/collab/src/db.rs index 
d8803c253f5feef8ef5e040f3ea112abcc688f52..44abc37af66e3f169d3af1a7d5e29063e382c620 100644 --- a/crates/collab/src/db.rs +++ b/crates/collab/src/db.rs @@ -559,6 +559,7 @@ pub struct RejoinedWorktree { pub settings_files: Vec, pub scan_id: u64, pub completed_scan_id: u64, + pub root_repo_common_dir: Option, } pub struct LeftRoom { @@ -589,6 +590,7 @@ pub struct Project { pub repositories: Vec, pub language_servers: Vec, pub path_style: PathStyle, + pub features: Vec, } pub struct ProjectCollaborator { @@ -637,6 +639,7 @@ pub struct Worktree { pub settings_files: Vec, pub scan_id: u64, pub completed_scan_id: u64, + pub root_repo_common_dir: Option, } #[derive(Debug)] diff --git a/crates/collab/src/db/queries/projects.rs b/crates/collab/src/db/queries/projects.rs index 71365fb3846c1dccbf527d76779ed8816bde243b..b1ea638072a30d6b881a711448223449aa9f53e2 100644 --- a/crates/collab/src/db/queries/projects.rs +++ b/crates/collab/src/db/queries/projects.rs @@ -34,6 +34,7 @@ impl Database { worktrees: &[proto::WorktreeMetadata], is_ssh_project: bool, windows_paths: bool, + features: &[String], ) -> Result> { self.room_transaction(room_id, |tx| async move { let participant = room_participant::Entity::find() @@ -71,6 +72,7 @@ impl Database { ))), id: ActiveValue::NotSet, windows_paths: ActiveValue::set(windows_paths), + features: ActiveValue::set(serde_json::to_string(features).unwrap()), } .insert(&*tx) .await?; @@ -85,6 +87,7 @@ impl Database { visible: ActiveValue::set(worktree.visible), scan_id: ActiveValue::set(0), completed_scan_id: ActiveValue::set(0), + root_repo_common_dir: ActiveValue::set(None), } })) .exec(&*tx) @@ -201,6 +204,7 @@ impl Database { visible: ActiveValue::set(worktree.visible), scan_id: ActiveValue::set(0), completed_scan_id: ActiveValue::set(0), + root_repo_common_dir: ActiveValue::set(None), })) .on_conflict( OnConflict::columns([worktree::Column::ProjectId, worktree::Column::Id]) @@ -264,6 +268,7 @@ impl Database { ActiveValue::default() }, abs_path: 
ActiveValue::set(update.abs_path.clone()), + root_repo_common_dir: ActiveValue::set(update.root_repo_common_dir.clone()), ..Default::default() }) .exec(&*tx) @@ -759,6 +764,7 @@ impl Database { settings_files: Default::default(), scan_id: db_worktree.scan_id as u64, completed_scan_id: db_worktree.completed_scan_id as u64, + root_repo_common_dir: db_worktree.root_repo_common_dir, legacy_repository_entries: Default::default(), }, ) @@ -948,6 +954,7 @@ impl Database { } else { PathStyle::Posix }; + let features: Vec = serde_json::from_str(&project.features).unwrap_or_default(); let project = Project { id: project.id, @@ -977,6 +984,7 @@ impl Database { }) .collect(), path_style, + features, }; Ok((project, replica_id as ReplicaId)) } diff --git a/crates/collab/src/db/queries/rooms.rs b/crates/collab/src/db/queries/rooms.rs index 3197d142cba7a1969e6fdb9423dc94497f6ca53c..94e003fd2d27c97a53f66606d11ed2e15609b728 100644 --- a/crates/collab/src/db/queries/rooms.rs +++ b/crates/collab/src/db/queries/rooms.rs @@ -629,6 +629,7 @@ impl Database { settings_files: Default::default(), scan_id: db_worktree.scan_id as u64, completed_scan_id: db_worktree.completed_scan_id as u64, + root_repo_common_dir: db_worktree.root_repo_common_dir, }; let rejoined_worktree = rejoined_project diff --git a/crates/collab/src/db/tables/project.rs b/crates/collab/src/db/tables/project.rs index 11a9b972ebcd7af29d6e6c234096384ce9ff7701..76d399cfc6445ca7c2516cc4cd76e885230868af 100644 --- a/crates/collab/src/db/tables/project.rs +++ b/crates/collab/src/db/tables/project.rs @@ -13,6 +13,7 @@ pub struct Model { pub host_connection_id: Option, pub host_connection_server_id: Option, pub windows_paths: bool, + pub features: String, } impl Model { diff --git a/crates/collab/src/db/tables/worktree.rs b/crates/collab/src/db/tables/worktree.rs index 46d9877dff152cdc3b30531606febec65595fec1..f67a9749a48e51fce81f97ad2faf8609c50a0204 100644 --- a/crates/collab/src/db/tables/worktree.rs +++ 
b/crates/collab/src/db/tables/worktree.rs @@ -15,6 +15,7 @@ pub struct Model { pub scan_id: i64, /// The last scan that fully completed. pub completed_scan_id: i64, + pub root_repo_common_dir: Option, } #[derive(Copy, Clone, Debug, EnumIter, DeriveRelation)] diff --git a/crates/collab/src/rpc.rs b/crates/collab/src/rpc.rs index 3c4efe0580c18c938f8245de9f40bf216bab9c81..20316fc3403de0e6212d13d455c5b619000d71b1 100644 --- a/crates/collab/src/rpc.rs +++ b/crates/collab/src/rpc.rs @@ -435,6 +435,7 @@ impl Server { .add_request_handler(forward_mutating_project_request::) .add_request_handler(forward_mutating_project_request::) .add_request_handler(forward_read_only_project_request::) + .add_request_handler(forward_read_only_project_request::) .add_request_handler(forward_mutating_project_request::) .add_request_handler(disallow_guest_request::) .add_request_handler(disallow_guest_request::) @@ -1485,6 +1486,7 @@ fn notify_rejoined_projects( worktree_id: worktree.id, abs_path: worktree.abs_path.clone(), root_name: worktree.root_name, + root_repo_common_dir: worktree.root_repo_common_dir, updated_entries: worktree.updated_entries, removed_entries: worktree.removed_entries, scan_id: worktree.scan_id, @@ -1775,6 +1777,7 @@ async fn share_project( &request.worktrees, request.is_ssh_project, request.windows_paths.unwrap_or(false), + &request.features, ) .await?; response.send(proto::ShareProjectResponse { @@ -1840,6 +1843,28 @@ async fn join_project( tracing::info!(%project_id, "join project"); let db = session.db().await; + let project_model = db.get_project(project_id).await?; + let host_features: Vec = + serde_json::from_str(&project_model.features).unwrap_or_default(); + let guest_features: HashSet<_> = request.features.iter().collect(); + let host_features_set: HashSet<_> = host_features.iter().collect(); + if guest_features != host_features_set { + let host_connection_id = project_model.host_connection()?; + let mut pool = session.connection_pool().await; + let 
host_version = pool + .connection(host_connection_id) + .map(|c| c.zed_version.to_string()); + let guest_version = pool + .connection(session.connection_id) + .map(|c| c.zed_version.to_string()); + drop(pool); + Err(anyhow!( + "The host (v{}) and guest (v{}) are using incompatible versions of Zed. The peer with the older version must update to collaborate.", + host_version.as_deref().unwrap_or("unknown"), + guest_version.as_deref().unwrap_or("unknown"), + ))?; + } + let (project, replica_id) = &mut *db .join_project( project_id, @@ -1850,6 +1875,7 @@ async fn join_project( ) .await?; drop(db); + tracing::info!(%project_id, "join remote project"); let collaborators = project .collaborators @@ -1909,6 +1935,7 @@ async fn join_project( language_server_capabilities, role: project.role.into(), windows_paths: project.path_style == PathStyle::Windows, + features: project.features.clone(), })?; for (worktree_id, worktree) in mem::take(&mut project.worktrees) { @@ -1918,6 +1945,7 @@ async fn join_project( worktree_id, abs_path: worktree.abs_path.clone(), root_name: worktree.root_name, + root_repo_common_dir: worktree.root_repo_common_dir, updated_entries: worktree.entries, removed_entries: Default::default(), scan_id: worktree.scan_id, diff --git a/crates/collab/tests/integration/channel_buffer_tests.rs b/crates/collab/tests/integration/channel_buffer_tests.rs index a5aca7dd82ca23b1c348bea1fff5d2da2870c654..dd8ae9a2c02cfae6c6c7e8c369308c5092be113e 100644 --- a/crates/collab/tests/integration/channel_buffer_tests.rs +++ b/crates/collab/tests/integration/channel_buffer_tests.rs @@ -313,7 +313,7 @@ fn assert_remote_selections( let snapshot = editor.snapshot(window, cx); let hub = editor.collaboration_hub().unwrap(); let collaborators = hub.collaborators(cx); - let range = Anchor::min()..Anchor::max(); + let range = Anchor::Min..Anchor::Max; let remote_selections = snapshot .remote_selections_in_range(&range, hub, cx) .map(|s| { diff --git 
a/crates/collab/tests/integration/db_tests/db_tests.rs b/crates/collab/tests/integration/db_tests/db_tests.rs index e2006b7fb9984c4bd0cf16a62e9321b2f7007e9e..710f95dbf7d82e05a541b844b093a04ca88565f7 100644 --- a/crates/collab/tests/integration/db_tests/db_tests.rs +++ b/crates/collab/tests/integration/db_tests/db_tests.rs @@ -350,20 +350,41 @@ async fn test_project_count(db: &Arc) { .unwrap(); assert_eq!(db.project_count_excluding_admins().await.unwrap(), 0); - db.share_project(room_id, ConnectionId { owner_id, id: 1 }, &[], false, false) - .await - .unwrap(); + db.share_project( + room_id, + ConnectionId { owner_id, id: 1 }, + &[], + false, + false, + &[], + ) + .await + .unwrap(); assert_eq!(db.project_count_excluding_admins().await.unwrap(), 1); - db.share_project(room_id, ConnectionId { owner_id, id: 1 }, &[], false, false) - .await - .unwrap(); + db.share_project( + room_id, + ConnectionId { owner_id, id: 1 }, + &[], + false, + false, + &[], + ) + .await + .unwrap(); assert_eq!(db.project_count_excluding_admins().await.unwrap(), 2); // Projects shared by admins aren't counted. 
- db.share_project(room_id, ConnectionId { owner_id, id: 0 }, &[], false, false) - .await - .unwrap(); + db.share_project( + room_id, + ConnectionId { owner_id, id: 0 }, + &[], + false, + false, + &[], + ) + .await + .unwrap(); assert_eq!(db.project_count_excluding_admins().await.unwrap(), 2); db.leave_room(ConnectionId { owner_id, id: 1 }) diff --git a/crates/collab/tests/integration/following_tests.rs b/crates/collab/tests/integration/following_tests.rs index c4031788c87f747c3125f4dbc509d68ea3720b43..7109b0f31452d2573426aa2300e7967b8f5a6601 100644 --- a/crates/collab/tests/integration/following_tests.rs +++ b/crates/collab/tests/integration/following_tests.rs @@ -2184,6 +2184,7 @@ async fn test_following_after_replacement(cx_a: &mut TestAppContext, cx_b: &mut ); mb }); + let multibuffer_snapshot = multibuffer.update(cx_a, |mb, cx| mb.snapshot(cx)); let snapshot = buffer.update(cx_a, |buffer, _| buffer.snapshot()); let editor: Entity = cx_a.new_window_entity(|window, cx| { Editor::for_multibuffer( @@ -2205,7 +2206,13 @@ async fn test_following_after_replacement(cx_a: &mut TestAppContext, cx_b: &mut editor .selections .disjoint_anchor_ranges() - .map(|range| range.start.text_anchor.to_point(&snapshot)) + .map(|range| { + multibuffer_snapshot + .anchor_to_buffer_anchor(range.start) + .unwrap() + .0 + .to_point(&snapshot) + }) .collect::>() }); multibuffer.update(cx_a, |multibuffer, cx| { @@ -2232,7 +2239,13 @@ async fn test_following_after_replacement(cx_a: &mut TestAppContext, cx_b: &mut editor .selections .disjoint_anchor_ranges() - .map(|range| range.start.text_anchor.to_point(&snapshot)) + .map(|range| { + multibuffer_snapshot + .anchor_to_buffer_anchor(range.start) + .unwrap() + .0 + .to_point(&snapshot) + }) .collect::>() }); assert_eq!(positions, new_positions); diff --git a/crates/collab/tests/integration/git_tests.rs b/crates/collab/tests/integration/git_tests.rs index 4af1355352554ee6e3350806cefe0b4cd41cf5d6..2fa67b072f1c3d49ef5ca1b90056fd08d57df1ba 100644 
--- a/crates/collab/tests/integration/git_tests.rs +++ b/crates/collab/tests/integration/git_tests.rs @@ -1,4 +1,4 @@ -use std::path::{Path, PathBuf}; +use std::path::{self, Path, PathBuf}; use call::ActiveCall; use client::RECEIVE_TIMEOUT; @@ -17,6 +17,61 @@ use workspace::{MultiWorkspace, Workspace}; use crate::TestServer; +#[gpui::test] +async fn test_root_repo_common_dir_sync( + executor: BackgroundExecutor, + cx_a: &mut TestAppContext, + cx_b: &mut TestAppContext, +) { + let mut server = TestServer::start(executor.clone()).await; + let client_a = server.create_client(cx_a, "user_a").await; + let client_b = server.create_client(cx_b, "user_b").await; + server + .create_room(&mut [(&client_a, cx_a), (&client_b, cx_b)]) + .await; + let active_call_a = cx_a.read(ActiveCall::global); + + // Set up a project whose root IS a git repository. + client_a + .fs() + .insert_tree( + path!("/project"), + json!({ ".git": {}, "file.txt": "content" }), + ) + .await; + + let (project_a, _) = client_a.build_local_project(path!("/project"), cx_a).await; + executor.run_until_parked(); + + // Host should see root_repo_common_dir pointing to .git at the root. + let host_common_dir = project_a.read_with(cx_a, |project, cx| { + let worktree = project.worktrees(cx).next().unwrap(); + worktree.read(cx).snapshot().root_repo_common_dir().cloned() + }); + assert_eq!( + host_common_dir.as_deref(), + Some(path::Path::new(path!("/project/.git"))), + ); + + // Share the project and have client B join. + let project_id = active_call_a + .update(cx_a, |call, cx| call.share_project(project_a.clone(), cx)) + .await + .unwrap(); + let project_b = client_b.join_remote_project(project_id, cx_b).await; + executor.run_until_parked(); + + // Guest should see the same root_repo_common_dir as the host. 
+ let guest_common_dir = project_b.read_with(cx_b, |project, cx| { + let worktree = project.worktrees(cx).next().unwrap(); + worktree.read(cx).snapshot().root_repo_common_dir().cloned() + }); + assert_eq!( + guest_common_dir, host_common_dir, + "guest should see the same root_repo_common_dir as host", + ); +} + fn collect_diff_stats( panel: &gpui::Entity, cx: &C, @@ -369,6 +424,58 @@ async fn test_remote_git_worktrees( ); } +#[gpui::test] +async fn test_remote_git_head_sha( + executor: BackgroundExecutor, + cx_a: &mut TestAppContext, + cx_b: &mut TestAppContext, +) { + let mut server = TestServer::start(executor.clone()).await; + let client_a = server.create_client(cx_a, "user_a").await; + let client_b = server.create_client(cx_b, "user_b").await; + server + .create_room(&mut [(&client_a, cx_a), (&client_b, cx_b)]) + .await; + let active_call_a = cx_a.read(ActiveCall::global); + + client_a + .fs() + .insert_tree( + path!("/project"), + json!({ ".git": {}, "file.txt": "content" }), + ) + .await; + + let (project_a, _) = client_a.build_local_project(path!("/project"), cx_a).await; + let local_head_sha = cx_a.update(|cx| { + project_a + .read(cx) + .active_repository(cx) + .unwrap() + .update(cx, |repository, _| repository.head_sha()) + }); + let local_head_sha = local_head_sha.await.unwrap().unwrap(); + + let project_id = active_call_a + .update(cx_a, |call, cx| call.share_project(project_a.clone(), cx)) + .await + .unwrap(); + let project_b = client_b.join_remote_project(project_id, cx_b).await; + + executor.run_until_parked(); + + let remote_head_sha = cx_b.update(|cx| { + project_b + .read(cx) + .active_repository(cx) + .unwrap() + .update(cx, |repository, _| repository.head_sha()) + }); + let remote_head_sha = remote_head_sha.await.unwrap(); + + assert_eq!(remote_head_sha.unwrap(), local_head_sha); +} + #[gpui::test] async fn test_linked_worktrees_sync( executor: BackgroundExecutor, @@ -394,29 +501,29 @@ async fn test_linked_worktrees_sync( ) .await; - client_a - 
.fs() - .with_git_state(Path::new(path!("/project/.git")), true, |state| { - state.worktrees.push(GitWorktree { - path: PathBuf::from(path!("/project")), - ref_name: Some("refs/heads/main".into()), - sha: "aaa111".into(), - is_main: false, - }); - state.worktrees.push(GitWorktree { - path: PathBuf::from(path!("/project/feature-branch")), - ref_name: Some("refs/heads/feature-branch".into()), - sha: "bbb222".into(), - is_main: false, - }); - state.worktrees.push(GitWorktree { - path: PathBuf::from(path!("/project/bugfix-branch")), - ref_name: Some("refs/heads/bugfix-branch".into()), - sha: "ccc333".into(), - is_main: false, - }); - }) - .unwrap(); + let fs = client_a.fs(); + fs.add_linked_worktree_for_repo( + Path::new(path!("/project/.git")), + true, + GitWorktree { + path: PathBuf::from(path!("/worktrees/feature-branch")), + ref_name: Some("refs/heads/feature-branch".into()), + sha: "bbb222".into(), + is_main: false, + }, + ) + .await; + fs.add_linked_worktree_for_repo( + Path::new(path!("/project/.git")), + true, + GitWorktree { + path: PathBuf::from(path!("/worktrees/bugfix-branch")), + ref_name: Some("refs/heads/bugfix-branch".into()), + sha: "ccc333".into(), + is_main: false, + }, + ) + .await; let (project_a, _) = client_a.build_local_project(path!("/project"), cx_a).await; @@ -437,22 +544,22 @@ async fn test_linked_worktrees_sync( ); assert_eq!( host_linked[0].path, - PathBuf::from(path!("/project/feature-branch")) + PathBuf::from(path!("/worktrees/bugfix-branch")) ); assert_eq!( host_linked[0].ref_name, - Some("refs/heads/feature-branch".into()) + Some("refs/heads/bugfix-branch".into()) ); - assert_eq!(host_linked[0].sha.as_ref(), "bbb222"); + assert_eq!(host_linked[0].sha.as_ref(), "ccc333"); assert_eq!( host_linked[1].path, - PathBuf::from(path!("/project/bugfix-branch")) + PathBuf::from(path!("/worktrees/feature-branch")) ); assert_eq!( host_linked[1].ref_name, - Some("refs/heads/bugfix-branch".into()) + Some("refs/heads/feature-branch".into()) ); - 
assert_eq!(host_linked[1].sha.as_ref(), "ccc333"); + assert_eq!(host_linked[1].sha.as_ref(), "bbb222"); // Share the project and have client B join. let project_id = active_call_a @@ -478,15 +585,17 @@ async fn test_linked_worktrees_sync( // Now mutate: add a third linked worktree on the host side. client_a .fs() - .with_git_state(Path::new(path!("/project/.git")), true, |state| { - state.worktrees.push(GitWorktree { - path: PathBuf::from(path!("/project/hotfix-branch")), + .add_linked_worktree_for_repo( + Path::new(path!("/project/.git")), + true, + GitWorktree { + path: PathBuf::from(path!("/worktrees/hotfix-branch")), ref_name: Some("refs/heads/hotfix-branch".into()), sha: "ddd444".into(), is_main: false, - }); - }) - .unwrap(); + }, + ) + .await; // Wait for the host to re-scan and propagate the update. executor.run_until_parked(); @@ -504,7 +613,7 @@ async fn test_linked_worktrees_sync( ); assert_eq!( host_linked_updated[2].path, - PathBuf::from(path!("/project/hotfix-branch")) + PathBuf::from(path!("/worktrees/hotfix-branch")) ); // Verify the guest also received the update. @@ -521,12 +630,12 @@ async fn test_linked_worktrees_sync( // Now mutate: remove one linked worktree from the host side. 
client_a .fs() - .with_git_state(Path::new(path!("/project/.git")), true, |state| { - state - .worktrees - .retain(|wt| wt.ref_name != Some("refs/heads/bugfix-branch".into())); - }) - .unwrap(); + .remove_worktree_for_repo( + Path::new(path!("/project/.git")), + true, + "refs/heads/bugfix-branch", + ) + .await; executor.run_until_parked(); diff --git a/crates/collab/tests/integration/remote_editing_collaboration_tests.rs b/crates/collab/tests/integration/remote_editing_collaboration_tests.rs index fe93a06f7265d102d8727466c46e83daf066e506..0796323fc5b3d8f6b1cbcb0e108a7d573240f446 100644 --- a/crates/collab/tests/integration/remote_editing_collaboration_tests.rs +++ b/crates/collab/tests/integration/remote_editing_collaboration_tests.rs @@ -469,7 +469,7 @@ async fn test_ssh_collaboration_git_worktrees( .unwrap(); assert_eq!(worktrees.len(), 1); - let worktree_directory = PathBuf::from("/project"); + let worktree_directory = PathBuf::from("/worktrees"); cx_b.update(|cx| { repo_b.update(cx, |repo, _| { repo.create_worktree( @@ -536,8 +536,8 @@ async fn test_ssh_collaboration_git_worktrees( cx_a.update(|cx| { repo_a.update(cx, |repository, _| { repository.rename_worktree( - PathBuf::from("/project/feature-branch"), - PathBuf::from("/project/renamed-branch"), + PathBuf::from("/worktrees/feature-branch"), + PathBuf::from("/worktrees/renamed-branch"), ) }) }) @@ -559,7 +559,7 @@ async fn test_ssh_collaboration_git_worktrees( ); assert_eq!( host_worktrees[1].path, - PathBuf::from("/project/renamed-branch") + PathBuf::from("/worktrees/renamed-branch") ); let server_worktrees = { @@ -588,13 +588,13 @@ async fn test_ssh_collaboration_git_worktrees( ); assert_eq!( server_worktrees[1].path, - PathBuf::from("/project/renamed-branch") + PathBuf::from("/worktrees/renamed-branch") ); // Host (client A) removes the renamed worktree via SSH cx_a.update(|cx| { repo_a.update(cx, |repository, _| { - repository.remove_worktree(PathBuf::from("/project/renamed-branch"), false) + 
repository.remove_worktree(PathBuf::from("/worktrees/renamed-branch"), false) }) }) .await diff --git a/crates/collab_ui/src/collab_panel.rs b/crates/collab_ui/src/collab_panel.rs index 6a53e590586ec2353feafe267501619e8bbfcc71..8d0cdf351163dadf0ac8cbf6a8dc04886f30f583 100644 --- a/crates/collab_ui/src/collab_panel.rs +++ b/crates/collab_ui/src/collab_panel.rs @@ -13,12 +13,13 @@ use db::kvp::KeyValueStore; use editor::{Editor, EditorElement, EditorStyle}; use fuzzy::{StringMatch, StringMatchCandidate, match_strings}; use gpui::{ - AnyElement, App, AsyncWindowContext, Bounds, ClickEvent, ClipboardItem, Context, DismissEvent, - Div, Entity, EventEmitter, FocusHandle, Focusable, FontStyle, InteractiveElement, IntoElement, - KeyContext, ListOffset, ListState, MouseDownEvent, ParentElement, Pixels, Point, PromptLevel, - Render, SharedString, Styled, Subscription, Task, TextStyle, WeakEntity, Window, actions, - anchored, canvas, deferred, div, fill, list, point, prelude::*, px, + AnyElement, App, AsyncWindowContext, Bounds, ClickEvent, ClipboardItem, DismissEvent, Div, + Empty, Entity, EventEmitter, FocusHandle, Focusable, FontStyle, KeyContext, ListOffset, + ListState, MouseDownEvent, Pixels, Point, PromptLevel, SharedString, Subscription, Task, + TextStyle, WeakEntity, Window, actions, anchored, canvas, deferred, div, fill, list, point, + prelude::*, px, }; + use menu::{Cancel, Confirm, SecondaryConfirm, SelectNext, SelectPrevious}; use project::{Fs, Project}; use rpc::{ @@ -43,6 +44,9 @@ use workspace::{ notifications::{DetachAndPromptErr, NotifyResultExt}, }; +const FILTER_OCCUPIED_CHANNELS_KEY: &str = "filter_occupied_channels"; +const FAVORITE_CHANNELS_KEY: &str = "favorite_channels"; + actions!( collab_panel, [ @@ -243,7 +247,9 @@ pub struct CollabPanel { fs: Arc, focus_handle: FocusHandle, channel_clipboard: Option, - pending_serialization: Task>, + pending_panel_serialization: Task>, + pending_favorites_serialization: Task>, + pending_filter_serialization: 
Task>, context_menu: Option<(Entity, Point, Subscription)>, list_state: ListState, filter_editor: Entity, @@ -259,7 +265,7 @@ pub struct CollabPanel { subscriptions: Vec, collapsed_sections: Vec
, collapsed_channels: Vec, - filter_active_channels: bool, + filter_occupied_channels: bool, workspace: WeakEntity, } @@ -377,7 +383,9 @@ impl CollabPanel { focus_handle: cx.focus_handle(), channel_clipboard: None, fs: workspace.app_state().fs.clone(), - pending_serialization: Task::ready(None), + pending_panel_serialization: Task::ready(None), + pending_favorites_serialization: Task::ready(None), + pending_filter_serialization: Task::ready(None), context_menu: None, list_state: ListState::new(0, gpui::ListAlignment::Top, px(1000.)), channel_name_editor, @@ -392,7 +400,7 @@ impl CollabPanel { match_candidates: Vec::default(), collapsed_sections: vec![Section::Offline], collapsed_channels: Vec::default(), - filter_active_channels: false, + filter_occupied_channels: false, workspace: workspace.weak_handle(), client: workspace.app_state().client.clone(), }; @@ -473,8 +481,22 @@ impl CollabPanel { }); } + let filter_occupied_channels = KeyValueStore::global(cx) + .read_kvp(FILTER_OCCUPIED_CHANNELS_KEY) + .ok() + .flatten() + .is_some(); + + panel.update(cx, |panel, cx| { + panel.filter_occupied_channels = filter_occupied_channels; + + if filter_occupied_channels { + panel.update_entries(false, cx); + } + }); + let favorites: Vec = KeyValueStore::global(cx) - .read_kvp("favorite_channels") + .read_kvp(FAVORITE_CHANNELS_KEY) .ok() .flatten() .and_then(|json| serde_json::from_str::>(&json).ok()) @@ -519,7 +541,7 @@ impl CollabPanel { }; let kvp = KeyValueStore::global(cx); - self.pending_serialization = cx.background_spawn( + self.pending_panel_serialization = cx.background_spawn( async move { kvp.write_kvp( serialization_key, @@ -779,14 +801,14 @@ impl CollabPanel { channels.retain(|chan| channel_ids_of_matches_or_parents.contains(&chan.id)); - if self.filter_active_channels { - let active_channel_ids_or_ancestors: HashSet<_> = channel_store + if self.filter_occupied_channels { + let occupied_channel_ids_or_ancestors: HashSet<_> = channel_store .ordered_channels() 
.map(|(_, channel)| channel) .filter(|channel| !channel_store.channel_participants(channel.id).is_empty()) .flat_map(|channel| channel.parent_path.iter().copied().chain(Some(channel.id))) .collect(); - channels.retain(|channel| active_channel_ids_or_ancestors.contains(&channel.id)); + channels.retain(|channel| occupied_channel_ids_or_ancestors.contains(&channel.id)); } if let Some(state) = &self.channel_editing_state @@ -795,7 +817,7 @@ impl CollabPanel { self.entries.push(ListEntry::ChannelEditor { depth: 0 }); } - let should_respect_collapse = query.is_empty() && !self.filter_active_channels; + let should_respect_collapse = query.is_empty() && !self.filter_occupied_channels; let mut collapse_depth = None; for (idx, channel) in channels.into_iter().enumerate() { @@ -1091,27 +1113,30 @@ impl CollabPanel { room.read(cx).local_participant().role == proto::ChannelRole::Admin }); + let end_slot = if is_pending { + Label::new("Calling").color(Color::Muted).into_any_element() + } else if is_current_user { + IconButton::new("leave-call", IconName::Exit) + .icon_size(IconSize::Small) + .tooltip(Tooltip::text("Leave Call")) + .on_click(move |_, window, cx| Self::leave_call(window, cx)) + .into_any_element() + } else if role == proto::ChannelRole::Guest { + Label::new("Guest").color(Color::Muted).into_any_element() + } else if role == proto::ChannelRole::Talker { + Label::new("Mic only") + .color(Color::Muted) + .into_any_element() + } else { + Empty.into_any_element() + }; + ListItem::new(user.github_login.clone()) .start_slot(Avatar::new(user.avatar_uri.clone())) .child(render_participant_name_and_handle(user)) .toggle_state(is_selected) - .end_slot(if is_pending { - Label::new("Calling").color(Color::Muted).into_any_element() - } else if is_current_user { - IconButton::new("leave-call", IconName::Exit) - .style(ButtonStyle::Subtle) - .on_click(move |_, window, cx| Self::leave_call(window, cx)) - .tooltip(Tooltip::text("Leave Call")) - .into_any_element() - } else if role 
== proto::ChannelRole::Guest { - Label::new("Guest").color(Color::Muted).into_any_element() - } else if role == proto::ChannelRole::Talker { - Label::new("Mic only") - .color(Color::Muted) - .into_any_element() - } else { - div().into_any_element() - }) + .end_slot(end_slot) + .tooltip(Tooltip::text("Click to Follow")) .when_some(peer_id, |el, peer_id| { if role == proto::ChannelRole::Guest { return el; @@ -1156,6 +1181,7 @@ impl CollabPanel { .into(); ListItem::new(project_id as usize) + .height(px(24.)) .toggle_state(is_selected) .on_click(cx.listener(move |this, _, window, cx| { this.workspace @@ -1166,16 +1192,20 @@ impl CollabPanel { "Failed to join project", window, cx, - |_, _, _| None, + |error, _, _| Some(format!("{error:#}")), ); }) .ok(); })) .start_slot( h_flex() - .gap_1() + .gap_1p5() .child(render_tree_branch(is_last, false, window, cx)) - .child(IconButton::new(0, IconName::Folder)), + .child( + Icon::new(IconName::Folder) + .size(IconSize::Small) + .color(Color::Muted), + ), ) .child(Label::new(project_name.clone())) .tooltip(Tooltip::text(format!("Open {}", project_name))) @@ -1192,12 +1222,17 @@ impl CollabPanel { let id = peer_id.map_or(usize::MAX, |id| id.as_u64() as usize); ListItem::new(("screen", id)) + .height(px(24.)) .toggle_state(is_selected) .start_slot( h_flex() - .gap_1() + .gap_1p5() .child(render_tree_branch(is_last, false, window, cx)) - .child(IconButton::new(0, IconName::Screen)), + .child( + Icon::new(IconName::Screen) + .size(IconSize::Small) + .color(Color::Muted), + ), ) .child(Label::new("Screen")) .when_some(peer_id, |this, _| { @@ -1208,7 +1243,7 @@ impl CollabPanel { }) .ok(); })) - .tooltip(Tooltip::text("Open shared screen")) + .tooltip(Tooltip::text("Open Shared Screen")) }) } @@ -1232,7 +1267,9 @@ impl CollabPanel { ) -> impl IntoElement { let channel_store = self.channel_store.read(cx); let has_channel_buffer_changed = channel_store.has_channel_buffer_changed(channel_id); + ListItem::new("channel-notes") + 
.height(px(24.)) .toggle_state(is_selected) .on_click(cx.listener(move |this, _, window, cx| { this.open_channel_notes(channel_id, window, cx); @@ -1240,17 +1277,25 @@ impl CollabPanel { .start_slot( h_flex() .relative() - .gap_1() + .gap_1p5() .child(render_tree_branch(false, true, window, cx)) - .child(IconButton::new(0, IconName::File)) - .children(has_channel_buffer_changed.then(|| { - div() - .w_1p5() - .absolute() - .right(px(2.)) - .top(px(2.)) - .child(Indicator::dot().color(Color::Info)) - })), + .child( + h_flex() + .child( + Icon::new(IconName::Reader) + .size(IconSize::Small) + .color(Color::Muted), + ) + .when(has_channel_buffer_changed, |this| { + this.child( + div() + .absolute() + .top_neg_0p5() + .right_0() + .child(Indicator::dot().color(Color::Info)), + ) + }), + ), ) .child(Label::new("notes")) .tooltip(Tooltip::text("Open Channel Notes")) @@ -1729,7 +1774,7 @@ impl CollabPanel { "Failed to join project", window, cx, - |_, _, _| None, + |error, _, _| Some(format!("{error:#}")), ); } } @@ -1946,6 +1991,26 @@ impl CollabPanel { self.channel_store.read(cx).is_channel_favorited(channel_id) } + fn persist_filter_occupied_channels(&mut self, cx: &mut Context) { + let is_enabled = self.filter_occupied_channels; + let kvp_store = KeyValueStore::global(cx); + self.pending_filter_serialization = cx.background_spawn( + async move { + if is_enabled { + kvp_store + .write_kvp(FILTER_OCCUPIED_CHANNELS_KEY.to_string(), "1".to_string()) + .await?; + } else { + kvp_store + .delete_kvp(FILTER_OCCUPIED_CHANNELS_KEY.to_string()) + .await?; + } + anyhow::Ok(()) + } + .log_err(), + ); + } + fn persist_favorites(&mut self, cx: &mut Context) { let favorite_ids: Vec = self .channel_store @@ -1955,11 +2020,11 @@ impl CollabPanel { .map(|id| id.0) .collect(); let kvp_store = KeyValueStore::global(cx); - self.pending_serialization = cx.background_spawn( + self.pending_favorites_serialization = cx.background_spawn( async move { let json = 
serde_json::to_string(&favorite_ids)?; kvp_store - .write_kvp("favorite_channels".to_string(), json) + .write_kvp(FAVORITE_CHANNELS_KEY.to_string(), json) .await?; anyhow::Ok(()) } @@ -2819,14 +2884,15 @@ impl CollabPanel { Some( h_flex() .child( - IconButton::new("filter-active-channels", IconName::ListFilter) + IconButton::new("filter-occupied-channels", IconName::ListFilter) .icon_size(IconSize::Small) - .toggle_state(self.filter_active_channels) + .toggle_state(self.filter_occupied_channels) .on_click(cx.listener(|this, _, _window, cx| { - this.filter_active_channels = !this.filter_active_channels; + this.filter_occupied_channels = !this.filter_occupied_channels; this.update_entries(true, cx); + this.persist_filter_occupied_channels(cx); })) - .tooltip(Tooltip::text(if self.filter_active_channels { + .tooltip(Tooltip::text(if self.filter_occupied_channels { "Show All Channels" } else { "Show Occupied Channels" @@ -3144,10 +3210,14 @@ impl CollabPanel { (IconName::Star, Color::Default, "Add to Favorites") }; + let height = px(24.); + h_flex() .id(ix) .group("") + .h(height) .w_full() + .overflow_hidden() .when(!channel.is_root_channel(), |el| { el.on_drag(channel.clone(), move |channel, _, _, cx| { cx.new(|_| DraggedChannelView { @@ -3175,6 +3245,7 @@ impl CollabPanel { ) .child( ListItem::new(ix) + .height(height) // Add one level of depth for the disclosure arrow. 
.indent_level(depth + 1) .indent_step_size(px(20.)) @@ -3256,12 +3327,13 @@ impl CollabPanel { .child( h_flex() .visible_on_hover("") + .h_full() .absolute() .right_0() .px_1() .gap_px() - .bg(cx.theme().colors().background) .rounded_l_md() + .bg(cx.theme().colors().background) .child({ let focus_handle = self.focus_handle.clone(); IconButton::new("channel_favorite", favorite_icon) @@ -3335,9 +3407,8 @@ fn render_tree_branch( ) -> impl IntoElement { let rem_size = window.rem_size(); let line_height = window.text_style().line_height_in_pixels(rem_size); - let width = rem_size * 1.5; let thickness = px(1.); - let color = cx.theme().colors().text; + let color = cx.theme().colors().icon_disabled; canvas( |_, _, _| {}, @@ -3367,8 +3438,8 @@ fn render_tree_branch( )); }, ) - .w(width) - .h(line_height) + .w(rem_size) + .h(line_height - px(2.)) } fn render_participant_name_and_handle(user: &User) -> impl IntoElement { diff --git a/crates/context_server/src/oauth.rs b/crates/context_server/src/oauth.rs index 8fa94b775bd270809e5b26aa7fe8478ad6378170..1a314de2fca9b9987336decb15b208ffd7759dea 100644 --- a/crates/context_server/src/oauth.rs +++ b/crates/context_server/src/oauth.rs @@ -2733,10 +2733,7 @@ mod tests { assert!(refreshed); assert_eq!(provider.access_token().as_deref(), Some("new-access")); - let notified_session = rx - .try_next() - .unwrap() - .expect("channel should have a session"); + let notified_session = rx.try_recv().expect("channel should have a session"); assert_eq!(notified_session.tokens.access_token, "new-access"); assert_eq!( notified_session.tokens.refresh_token.as_deref(), @@ -2768,10 +2765,7 @@ mod tests { let refreshed = provider.try_refresh().await.unwrap(); assert!(refreshed); - let notified_session = rx - .try_next() - .unwrap() - .expect("channel should have a session"); + let notified_session = rx.try_recv().expect("channel should have a session"); assert_eq!(notified_session.tokens.access_token, "new-access"); assert_eq!( 
notified_session.tokens.refresh_token.as_deref(), diff --git a/crates/copilot/src/copilot_edit_prediction_delegate.rs b/crates/copilot/src/copilot_edit_prediction_delegate.rs index 6f69bc6bc7bea4ec31aa59262a4abc5640999a2e..e789a89df65daf45dd02a16d954b299307e0c62d 100644 --- a/crates/copilot/src/copilot_edit_prediction_delegate.rs +++ b/crates/copilot/src/copilot_edit_prediction_delegate.rs @@ -1045,7 +1045,7 @@ mod tests { }); executor.advance_clock(COPILOT_DEBOUNCE_TIMEOUT); - assert!(copilot_requests.try_next().is_err()); + assert!(copilot_requests.try_recv().is_err()); _ = editor.update(cx, |editor, window, cx| { editor.change_selections(SelectionEffects::no_scroll(), window, cx, |s| { @@ -1055,7 +1055,7 @@ mod tests { }); executor.advance_clock(COPILOT_DEBOUNCE_TIMEOUT); - assert!(copilot_requests.try_next().is_ok()); + assert!(copilot_requests.try_recv().is_ok()); } fn handle_copilot_completion_request( diff --git a/crates/credentials_provider/Cargo.toml b/crates/credentials_provider/Cargo.toml index bf47bb24b12b90d54bc04f766efe06489c730b43..da83c0cd79a1b71bbb84746b3e893f33094783d6 100644 --- a/crates/credentials_provider/Cargo.toml +++ b/crates/credentials_provider/Cargo.toml @@ -13,9 +13,5 @@ path = "src/credentials_provider.rs" [dependencies] anyhow.workspace = true -futures.workspace = true gpui.workspace = true -paths.workspace = true -release_channel.workspace = true serde.workspace = true -serde_json.workspace = true diff --git a/crates/credentials_provider/src/credentials_provider.rs b/crates/credentials_provider/src/credentials_provider.rs index 249b8333e114223aa558cd33637fd103294a8f8d..b98e97673cc11272826af24c76e8a0a6a38b9211 100644 --- a/crates/credentials_provider/src/credentials_provider.rs +++ b/crates/credentials_provider/src/credentials_provider.rs @@ -1,26 +1,8 @@ -use std::collections::HashMap; use std::future::Future; -use std::path::PathBuf; use std::pin::Pin; -use std::sync::{Arc, LazyLock}; use anyhow::Result; -use futures::FutureExt as 
_; -use gpui::{App, AsyncApp}; -use release_channel::ReleaseChannel; - -/// An environment variable whose presence indicates that the system keychain -/// should be used in development. -/// -/// By default, running Zed in development uses the development credentials -/// provider. Setting this environment variable allows you to interact with the -/// system keychain (for instance, if you need to test something). -/// -/// Only works in development. Setting this environment variable in other -/// release channels is a no-op. -static ZED_DEVELOPMENT_USE_KEYCHAIN: LazyLock = LazyLock::new(|| { - std::env::var("ZED_DEVELOPMENT_USE_KEYCHAIN").is_ok_and(|value| !value.is_empty()) -}); +use gpui::AsyncApp; /// A provider for credentials. /// @@ -50,150 +32,3 @@ pub trait CredentialsProvider: Send + Sync { cx: &'a AsyncApp, ) -> Pin> + 'a>>; } - -impl dyn CredentialsProvider { - /// Returns the global [`CredentialsProvider`]. - pub fn global(cx: &App) -> Arc { - // The `CredentialsProvider` trait has `Send + Sync` bounds on it, so it - // seems like this is a false positive from Clippy. - #[allow(clippy::arc_with_non_send_sync)] - Self::new(cx) - } - - fn new(cx: &App) -> Arc { - let use_development_provider = match ReleaseChannel::try_global(cx) { - Some(ReleaseChannel::Dev) => { - // In development we default to using the development - // credentials provider to avoid getting spammed by relentless - // keychain access prompts. - // - // However, if the `ZED_DEVELOPMENT_USE_KEYCHAIN` environment - // variable is set, we will use the actual keychain. - !*ZED_DEVELOPMENT_USE_KEYCHAIN - } - Some(ReleaseChannel::Nightly | ReleaseChannel::Preview | ReleaseChannel::Stable) - | None => false, - }; - - if use_development_provider { - Arc::new(DevelopmentCredentialsProvider::new()) - } else { - Arc::new(KeychainCredentialsProvider) - } - } -} - -/// A credentials provider that stores credentials in the system keychain. 
-struct KeychainCredentialsProvider; - -impl CredentialsProvider for KeychainCredentialsProvider { - fn read_credentials<'a>( - &'a self, - url: &'a str, - cx: &'a AsyncApp, - ) -> Pin)>>> + 'a>> { - async move { cx.update(|cx| cx.read_credentials(url)).await }.boxed_local() - } - - fn write_credentials<'a>( - &'a self, - url: &'a str, - username: &'a str, - password: &'a [u8], - cx: &'a AsyncApp, - ) -> Pin> + 'a>> { - async move { - cx.update(move |cx| cx.write_credentials(url, username, password)) - .await - } - .boxed_local() - } - - fn delete_credentials<'a>( - &'a self, - url: &'a str, - cx: &'a AsyncApp, - ) -> Pin> + 'a>> { - async move { cx.update(move |cx| cx.delete_credentials(url)).await }.boxed_local() - } -} - -/// A credentials provider that stores credentials in a local file. -/// -/// This MUST only be used in development, as this is not a secure way of storing -/// credentials on user machines. -/// -/// Its existence is purely to work around the annoyance of having to constantly -/// re-allow access to the system keychain when developing Zed. 
-struct DevelopmentCredentialsProvider { - path: PathBuf, -} - -impl DevelopmentCredentialsProvider { - fn new() -> Self { - let path = paths::config_dir().join("development_credentials"); - - Self { path } - } - - fn load_credentials(&self) -> Result)>> { - let json = std::fs::read(&self.path)?; - let credentials: HashMap)> = serde_json::from_slice(&json)?; - - Ok(credentials) - } - - fn save_credentials(&self, credentials: &HashMap)>) -> Result<()> { - let json = serde_json::to_string(credentials)?; - std::fs::write(&self.path, json)?; - - Ok(()) - } -} - -impl CredentialsProvider for DevelopmentCredentialsProvider { - fn read_credentials<'a>( - &'a self, - url: &'a str, - _cx: &'a AsyncApp, - ) -> Pin)>>> + 'a>> { - async move { - Ok(self - .load_credentials() - .unwrap_or_default() - .get(url) - .cloned()) - } - .boxed_local() - } - - fn write_credentials<'a>( - &'a self, - url: &'a str, - username: &'a str, - password: &'a [u8], - _cx: &'a AsyncApp, - ) -> Pin> + 'a>> { - async move { - let mut credentials = self.load_credentials().unwrap_or_default(); - credentials.insert(url.to_string(), (username.to_string(), password.to_vec())); - - self.save_credentials(&credentials) - } - .boxed_local() - } - - fn delete_credentials<'a>( - &'a self, - url: &'a str, - _cx: &'a AsyncApp, - ) -> Pin> + 'a>> { - async move { - let mut credentials = self.load_credentials()?; - credentials.remove(url); - - self.save_credentials(&credentials) - } - .boxed_local() - } -} diff --git a/crates/csv_preview/src/csv_preview.rs b/crates/csv_preview/src/csv_preview.rs index c38cefb2456b3f44e3cac61b02294ab1ed1e79f4..1b99139b004a940dfa0902e185f67fb4b77ed6a1 100644 --- a/crates/csv_preview/src/csv_preview.rs +++ b/crates/csv_preview/src/csv_preview.rs @@ -161,9 +161,7 @@ impl CsvPreviewView { editor, |this: &mut CsvPreviewView, _editor, event: &EditorEvent, cx| { match event { - EditorEvent::Edited { .. } - | EditorEvent::DirtyChanged - | EditorEvent::ExcerptsEdited { .. 
} => { + EditorEvent::Edited { .. } | EditorEvent::DirtyChanged => { this.parse_csv_from_active_editor(true, cx); } _ => {} diff --git a/crates/debugger_tools/src/dap_log.rs b/crates/debugger_tools/src/dap_log.rs index 6a6ac706ecd7e4e3e7369afe503652b9756b6dec..2c653217716b0218cff0b60eb2bce4ac1ce02e5d 100644 --- a/crates/debugger_tools/src/dap_log.rs +++ b/crates/debugger_tools/src/dap_log.rs @@ -1086,6 +1086,7 @@ impl SearchableItem for DapLogView { // DAP log is read-only. replacement: false, selection: false, + select_all: true, } } fn active_match_index( diff --git a/crates/debugger_ui/src/debugger_ui.rs b/crates/debugger_ui/src/debugger_ui.rs index 124967650b31cd88e72b2867838fb3a4ecbcf920..f5947a4393b2eeb8ca6ad3f844962500aa4ecf2d 100644 --- a/crates/debugger_ui/src/debugger_ui.rs +++ b/crates/debugger_ui/src/debugger_ui.rs @@ -299,7 +299,7 @@ pub fn init(cx: &mut App) { return; } maybe!({ - let (buffer, position, _) = editor + let (buffer, position) = editor .update(cx, |editor, cx| { let cursor_point: language::Point = editor .selections diff --git a/crates/debugger_ui/src/session/running/console.rs b/crates/debugger_ui/src/session/running/console.rs index c488e88d74e7f282bd0424a2213e08e2c9bec15f..65bc949b2b6ddb1a707abf2e001ffde151fb70b8 100644 --- a/crates/debugger_ui/src/session/running/console.rs +++ b/crates/debugger_ui/src/session/running/console.rs @@ -7,8 +7,8 @@ use anyhow::Result; use collections::HashMap; use dap::{CompletionItem, CompletionItemType, OutputEvent}; use editor::{ - Bias, CompletionProvider, Editor, EditorElement, EditorMode, EditorStyle, ExcerptId, - HighlightKey, MultiBufferOffset, SizingBehavior, + Bias, CompletionProvider, Editor, EditorElement, EditorMode, EditorStyle, HighlightKey, + MultiBufferOffset, SizingBehavior, }; use fuzzy::StringMatchCandidate; use gpui::{ @@ -528,7 +528,6 @@ struct ConsoleQueryBarCompletionProvider(WeakEntity); impl CompletionProvider for ConsoleQueryBarCompletionProvider { fn completions( &self, - 
_excerpt_id: ExcerptId, buffer: &Entity, buffer_position: language::Anchor, _trigger: editor::CompletionContext, diff --git a/crates/dev_container/src/devcontainer_json.rs b/crates/dev_container/src/devcontainer_json.rs index 4429c63a37a87d1b54455b8169359ddf40511e24..84e40edff18c9bb1e15071841afb2648e3fd2cc4 100644 --- a/crates/dev_container/src/devcontainer_json.rs +++ b/crates/dev_container/src/devcontainer_json.rs @@ -60,7 +60,8 @@ pub(crate) enum ShutdownAction { #[derive(Clone, Debug, Deserialize, Serialize, Eq, PartialEq)] #[serde(rename_all = "camelCase")] pub(crate) struct MountDefinition { - pub(crate) source: String, + #[serde(default)] + pub(crate) source: Option, pub(crate) target: String, #[serde(rename = "type")] pub(crate) mount_type: Option, @@ -68,19 +69,23 @@ pub(crate) struct MountDefinition { impl Display for MountDefinition { fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { - write!( - f, - "type={},source={},target={},consistency=cached", - self.mount_type.clone().unwrap_or_else(|| { - if self.source.starts_with('/') { - "bind".to_string() - } else { - "volume".to_string() + let mount_type = self.mount_type.clone().unwrap_or_else(|| { + if let Some(source) = &self.source { + if source.starts_with('/') + || source.starts_with("\\\\") + || source.get(1..3) == Some(":\\") + || source.get(1..3) == Some(":/") + { + return "bind".to_string(); } - }), - self.source, - self.target - ) + } + "volume".to_string() + }); + write!(f, "type={}", mount_type)?; + if let Some(source) = &self.source { + write!(f, ",source={}", source)?; + } + write!(f, ",target={},consistency=cached", self.target) } } @@ -253,13 +258,6 @@ impl DevContainer { } return DevContainerBuildType::None; } - - pub(crate) fn has_features(&self) -> bool { - self.features - .as_ref() - .map(|features| !features.is_empty()) - .unwrap_or(false) - } } // Custom deserializer that parses the entire customizations object as a @@ -450,8 +448,6 @@ where } } - let source = source 
- .ok_or_else(|| D::Error::custom(format!("mount string missing 'source': {}", s)))?; let target = target .ok_or_else(|| D::Error::custom(format!("mount string missing 'target': {}", s)))?; @@ -505,9 +501,6 @@ where } } - let source = source.ok_or_else(|| { - D::Error::custom(format!("mount string missing 'source': {}", s)) - })?; let target = target.ok_or_else(|| { D::Error::custom(format!("mount string missing 'target': {}", s)) })?; @@ -876,7 +869,7 @@ mod test { ])), container_user: Some("myUser".to_string()), mounts: Some(vec![MountDefinition { - source: "/localfolder/app".to_string(), + source: Some("/localfolder/app".to_string()), target: "/workspaces/app".to_string(), mount_type: Some("volume".to_string()), }]), @@ -885,7 +878,7 @@ mod test { override_command: Some(true), workspace_folder: Some("/workspaces".to_string()), workspace_mount: Some(MountDefinition { - source: "/app".to_string(), + source: Some("/app".to_string()), target: "/workspaces/app".to_string(), mount_type: Some("bind".to_string()) }), @@ -1319,12 +1312,12 @@ mod test { container_user: Some("myUser".to_string()), mounts: Some(vec![ MountDefinition { - source: "/localfolder/app".to_string(), + source: Some("/localfolder/app".to_string()), target: "/workspaces/app".to_string(), mount_type: Some("volume".to_string()), }, MountDefinition { - source: "dev-containers-cli-bashhistory".to_string(), + source: Some("dev-containers-cli-bashhistory".to_string()), target: "/home/node/commandhistory".to_string(), mount_type: None, } @@ -1334,7 +1327,7 @@ mod test { override_command: Some(true), workspace_folder: Some("/workspaces".to_string()), workspace_mount: Some(MountDefinition { - source: "/folder".to_string(), + source: Some("/folder".to_string()), target: "/workspace".to_string(), mount_type: Some("bind".to_string()) }), @@ -1355,4 +1348,65 @@ mod test { assert_eq!(devcontainer.build_type(), DevContainerBuildType::Dockerfile); } + + #[test] + fn 
mount_definition_should_use_bind_type_for_unix_absolute_paths() { + let mount = MountDefinition { + source: Some("/home/user/project".to_string()), + target: "/workspaces/project".to_string(), + mount_type: None, + }; + + let rendered = mount.to_string(); + + assert!( + rendered.starts_with("type=bind,"), + "Expected mount type 'bind' for Unix absolute path, but got: {rendered}" + ); + } + + #[test] + fn mount_definition_should_use_bind_type_for_windows_unc_paths() { + let mount = MountDefinition { + source: Some("\\\\server\\share\\project".to_string()), + target: "/workspaces/project".to_string(), + mount_type: None, + }; + + let rendered = mount.to_string(); + + assert!( + rendered.starts_with("type=bind,"), + "Expected mount type 'bind' for Windows UNC path, but got: {rendered}" + ); + } + + #[test] + fn mount_definition_should_use_bind_type_for_windows_absolute_paths() { + let mount = MountDefinition { + source: Some("C:\\Users\\mrg\\cli".to_string()), + target: "/workspaces/cli".to_string(), + mount_type: None, + }; + + let rendered = mount.to_string(); + + assert!( + rendered.starts_with("type=bind,"), + "Expected mount type 'bind' for Windows absolute path, but got: {rendered}" + ); + } + + #[test] + fn mount_definition_should_omit_source_when_none() { + let mount = MountDefinition { + source: None, + target: "/tmp".to_string(), + mount_type: Some("tmpfs".to_string()), + }; + + let rendered = mount.to_string(); + + assert_eq!(rendered, "type=tmpfs,target=/tmp,consistency=cached"); + } } diff --git a/crates/dev_container/src/devcontainer_manifest.rs b/crates/dev_container/src/devcontainer_manifest.rs index 1c2863f96118b5bac006f3a590da8cf8980994e2..5ef82fa3eb2a3ac5d13810e0f6102bec4f42295a 100644 --- a/crates/dev_container/src/devcontainer_manifest.rs +++ b/crates/dev_container/src/devcontainer_manifest.rs @@ -20,7 +20,8 @@ use crate::{ }, docker::{ Docker, DockerClient, DockerComposeConfig, DockerComposeService, DockerComposeServiceBuild, - 
DockerComposeVolume, DockerInspect, DockerPs, get_remote_dir_from_config, + DockerComposeServicePort, DockerComposeVolume, DockerInspect, DockerPs, + get_remote_dir_from_config, }, features::{DevContainerFeatureJson, FeatureManifest, parse_oci_feature_ref}, get_oci_token, @@ -316,13 +317,6 @@ impl DevContainerManifest { let root_image_tag = self.get_base_image_from_config().await?; let root_image = self.docker_client.inspect(&root_image_tag).await?; - if dev_container.build_type() == DevContainerBuildType::Image - && !dev_container.has_features() - { - log::debug!("No resources to download. Proceeding with just the image"); - return Ok(()); - } - let temp_base = std::env::temp_dir().join("devcontainer-zed"); let timestamp = std::time::SystemTime::now() .duration_since(std::time::UNIX_EPOCH) @@ -700,10 +694,29 @@ RUN sed -i -E 's/((^|\s)PATH=)([^\$]*)$/\1\${{PATH:-\3}}/g' /etc/profile || true } let dev_container = self.dev_container(); match dev_container.build_type() { - DevContainerBuildType::Image | DevContainerBuildType::Dockerfile => { + DevContainerBuildType::Image => { + let built_docker_image = self.build_docker_image().await?; + let Some(base_image) = dev_container.image.as_ref() else { + log::error!("Dev container is using and image which can't be referenced"); + return Err(DevContainerError::DevContainerParseFailed); + }; + let built_docker_image = self + .update_remote_user_uid(built_docker_image, base_image) + .await?; + + let resources = self.build_merged_resources(built_docker_image)?; + Ok(DevContainerBuildResources::Docker(resources)) + } + DevContainerBuildType::Dockerfile => { let built_docker_image = self.build_docker_image().await?; + let Some(features_build_info) = &self.features_build_info else { + log::error!( + "Can't attempt to build update UID dockerfile before initial docker build" + ); + return Err(DevContainerError::DevContainerParseFailed); + }; let built_docker_image = self - .update_remote_user_uid(built_docker_image, None) + 
.update_remote_user_uid(built_docker_image, &features_build_info.image_tag) .await?; let resources = self.build_merged_resources(built_docker_image)?; @@ -815,7 +828,7 @@ RUN sed -i -E 's/((^|\s)PATH=)([^\$]*)$/\1\${{PATH:-\3}}/g' /etc/profile || true let (main_service_name, main_service) = find_primary_service(&docker_compose_resources, self)?; - let built_service_image = if main_service + let (built_service_image, built_service_image_tag) = if main_service .build .as_ref() .map(|b| b.dockerfile.as_ref()) @@ -870,7 +883,13 @@ RUN sed -i -E 's/((^|\s)PATH=)([^\$]*)$/\1\${{PATH:-\3}}/g' /etc/profile || true labels: None, build: Some(DockerComposeServiceBuild { context: Some( - features_build_info.empty_context_dir.display().to_string(), + main_service + .build + .as_ref() + .and_then(|b| b.context.clone()) + .unwrap_or_else(|| { + features_build_info.empty_context_dir.display().to_string() + }), ), dockerfile: Some(dockerfile_path.display().to_string()), args: Some(build_args), @@ -904,16 +923,19 @@ RUN sed -i -E 's/((^|\s)PATH=)([^\$]*)$/\1\${{PATH:-\3}}/g' /etc/profile || true self.docker_client .docker_compose_build(&docker_compose_resources.files, &self.project_name()) .await?; - self.docker_client - .inspect(&features_build_info.image_tag) - .await? + ( + self.docker_client + .inspect(&features_build_info.image_tag) + .await?, + &features_build_info.image_tag, + ) } else if let Some(image) = &main_service.image { if dev_container .features .as_ref() .is_none_or(|features| features.is_empty()) { - self.docker_client.inspect(image).await? + (self.docker_client.inspect(image).await?, image) } else { if !supports_buildkit { self.build_feature_content_image().await?; @@ -993,9 +1015,12 @@ RUN sed -i -E 's/((^|\s)PATH=)([^\$]*)$/\1\${{PATH:-\3}}/g' /etc/profile || true .docker_compose_build(&docker_compose_resources.files, &self.project_name()) .await?; - self.docker_client - .inspect(&features_build_info.image_tag) - .await? 
+ ( + self.docker_client + .inspect(&features_build_info.image_tag) + .await?, + &features_build_info.image_tag, + ) } } else { log::error!("Docker compose must have either image or dockerfile defined"); @@ -1003,7 +1028,7 @@ RUN sed -i -E 's/((^|\s)PATH=)([^\$]*)$/\1\${{PATH:-\3}}/g' /etc/profile || true }; let built_service_image = self - .update_remote_user_uid(built_service_image, Some(&features_build_info.image_tag)) + .update_remote_user_uid(built_service_image, built_service_image_tag) .await?; let resources = self.build_merged_resources(built_service_image)?; @@ -1052,7 +1077,7 @@ RUN sed -i -E 's/((^|\s)PATH=)([^\$]*)$/\1\${{PATH:-\3}}/g' /etc/profile || true network_mode_service: Option<&str>, resources: DockerBuildResources, ) -> Result { - let mut runtime_labels = vec![]; + let mut runtime_labels = HashMap::new(); if let Some(metadata) = &resources.image.config.labels.metadata { let serialized_metadata = serde_json_lenient::to_string(metadata).map_err(|e| { @@ -1060,14 +1085,11 @@ RUN sed -i -E 's/((^|\s)PATH=)([^\$]*)$/\1\${{PATH:-\3}}/g' /etc/profile || true DevContainerError::ContainerNotValid(resources.image.id.clone()) })?; - runtime_labels.push(format!( - "{}={}", - "devcontainer.metadata", serialized_metadata - )); + runtime_labels.insert("devcontainer.metadata".to_string(), serialized_metadata); } for (k, v) in self.identifying_labels() { - runtime_labels.push(format!("{}={}", k, v)); + runtime_labels.insert(k.to_string(), v.to_string()); } let config_volumes: HashMap = resources @@ -1076,11 +1098,12 @@ RUN sed -i -E 's/((^|\s)PATH=)([^\$]*)$/\1\${{PATH:-\3}}/g' /etc/profile || true .filter_map(|mount| { if let Some(mount_type) = &mount.mount_type && mount_type.to_lowercase() == "volume" + && let Some(source) = &mount.source { Some(( - mount.source.clone(), + source.clone(), DockerComposeVolume { - name: mount.source.clone(), + name: source.clone(), }, )) } else { @@ -1140,18 +1163,30 @@ RUN sed -i -E 
's/((^|\s)PATH=)([^\$]*)$/\1\${{PATH:-\3}}/g' /etc/profile || true // If the main service uses a different service's network bridge, append to that service's ports instead if let Some(network_service_name) = network_mode_service { if let Some(service) = service_declarations.get_mut(network_service_name) { - service.ports.push(format!("{port}:{port}")); + service.ports.push(DockerComposeServicePort { + target: port.clone(), + published: port.clone(), + ..Default::default() + }); } else { service_declarations.insert( network_service_name.to_string(), DockerComposeService { - ports: vec![format!("{port}:{port}")], + ports: vec![DockerComposeServicePort { + target: port.clone(), + published: port.clone(), + ..Default::default() + }], ..Default::default() }, ); } } else { - main_service.ports.push(format!("{port}:{port}")); + main_service.ports.push(DockerComposeServicePort { + target: port.clone(), + published: port.clone(), + ..Default::default() + }); } } let other_service_ports: Vec<(&str, &str)> = forward_ports @@ -1174,12 +1209,20 @@ RUN sed -i -E 's/((^|\s)PATH=)([^\$]*)$/\1\${{PATH:-\3}}/g' /etc/profile || true .collect(); for (service_name, port) in other_service_ports { if let Some(service) = service_declarations.get_mut(service_name) { - service.ports.push(format!("{port}:{port}")); + service.ports.push(DockerComposeServicePort { + target: port.to_string(), + published: port.to_string(), + ..Default::default() + }); } else { service_declarations.insert( service_name.to_string(), DockerComposeService { - ports: vec![format!("{port}:{port}")], + ports: vec![DockerComposeServicePort { + target: port.to_string(), + published: port.to_string(), + ..Default::default() + }], ..Default::default() }, ); @@ -1189,18 +1232,30 @@ RUN sed -i -E 's/((^|\s)PATH=)([^\$]*)$/\1\${{PATH:-\3}}/g' /etc/profile || true if let Some(port) = &self.dev_container().app_port { if let Some(network_service_name) = network_mode_service { if let Some(service) = 
service_declarations.get_mut(network_service_name) { - service.ports.push(format!("{port}:{port}")); + service.ports.push(DockerComposeServicePort { + target: port.clone(), + published: port.clone(), + ..Default::default() + }); } else { service_declarations.insert( network_service_name.to_string(), DockerComposeService { - ports: vec![format!("{port}:{port}")], + ports: vec![DockerComposeServicePort { + target: port.clone(), + published: port.clone(), + ..Default::default() + }], ..Default::default() }, ); } } else { - main_service.ports.push(format!("{port}:{port}")); + main_service.ports.push(DockerComposeServicePort { + target: port.clone(), + published: port.clone(), + ..Default::default() + }); } } @@ -1282,7 +1337,7 @@ RUN sed -i -E 's/((^|\s)PATH=)([^\$]*)$/\1\${{PATH:-\3}}/g' /etc/profile || true async fn update_remote_user_uid( &self, image: DockerInspect, - _override_tag: Option<&str>, + _base_image: &str, ) -> Result { Ok(image) } @@ -1290,7 +1345,7 @@ RUN sed -i -E 's/((^|\s)PATH=)([^\$]*)$/\1\${{PATH:-\3}}/g' /etc/profile || true async fn update_remote_user_uid( &self, image: DockerInspect, - override_tag: Option<&str>, + base_image: &str, ) -> Result { let dev_container = self.dev_container(); @@ -1364,18 +1419,13 @@ RUN sed -i -E 's/((^|\s)PATH=)([^\$]*)$/\1\${{PATH:-\3}}/g' /etc/profile || true DevContainerError::FilesystemError })?; - let updated_image_tag = override_tag - .map(|t| t.to_string()) - .unwrap_or_else(|| format!("{}-uid", features_build_info.image_tag)); + let updated_image_tag = format!("{}-uid", features_build_info.image_tag); let mut command = Command::new(self.docker_client.docker_cli()); command.args(["build"]); command.args(["-f", &dockerfile_path.display().to_string()]); command.args(["-t", &updated_image_tag]); - command.args([ - "--build-arg", - &format!("BASE_IMAGE={}", features_build_info.image_tag), - ]); + command.args(["--build-arg", &format!("BASE_IMAGE={}", base_image)]); command.args(["--build-arg", 
&format!("REMOTE_USER={}", remote_user)]); command.args(["--build-arg", &format!("NEW_UID={}", host_uid)]); command.args(["--build-arg", &format!("NEW_GID={}", host_gid)]); @@ -1701,7 +1751,7 @@ RUN sed -i -E 's/((^|\s)PATH=)([^\$]*)$/\1\${PATH:-\3}/g' /etc/profile || true }; Ok(MountDefinition { - source: self.local_workspace_folder(), + source: Some(self.local_workspace_folder()), target: format!("/workspaces/{}", project_directory_name.display()), mount_type: None, }) @@ -2292,23 +2342,21 @@ fn get_remote_user_from_config( { return Ok(user.clone()); } - let Some(metadata) = &docker_config.config.labels.metadata else { - log::error!("Could not locate metadata"); - return Err(DevContainerError::ContainerNotValid( - docker_config.id.clone(), - )); - }; - for metadatum in metadata { - if let Some(remote_user) = metadatum.get("remoteUser") { - if let Some(remote_user_str) = remote_user.as_str() { - return Ok(remote_user_str.to_string()); + if let Some(metadata) = &docker_config.config.labels.metadata { + for metadatum in metadata { + if let Some(remote_user) = metadatum.get("remoteUser") { + if let Some(remote_user_str) = remote_user.as_str() { + return Ok(remote_user_str.to_string()); + } } } } - log::error!("Could not locate the remote user"); - Err(DevContainerError::ContainerNotValid( - docker_config.id.clone(), - )) + if let Some(image_user) = &docker_config.config.image_user { + if !image_user.is_empty() { + return Ok(image_user.to_string()); + } + } + Ok("root".to_string()) } // This should come from spec - see the docs @@ -2332,7 +2380,7 @@ fn get_container_user_from_config( return Ok(image_user.to_string()); } - Err(DevContainerError::DevContainerParseFailed) + Ok("root".to_string()) } #[cfg(test)] @@ -2356,6 +2404,8 @@ mod test { use serde_json_lenient::Value; use util::{command::Command, paths::SanitizedPath}; + #[cfg(not(target_os = "windows"))] + use crate::docker::DockerComposeServicePort; use crate::{ DevContainerConfig, DevContainerContext, 
command_json::CommandRunner, @@ -3502,6 +3552,27 @@ ENV DOCKER_BUILDKIT=1 "# ); + let build_override = files + .iter() + .find(|f| { + f.file_name() + .is_some_and(|s| s.display().to_string() == "docker_compose_build.json") + }) + .expect("to be found"); + let build_override = test_dependencies.fs.load(build_override).await.unwrap(); + let build_config: DockerComposeConfig = + serde_json_lenient::from_str(&build_override).unwrap(); + let build_context = build_config + .services + .get("app") + .and_then(|s| s.build.as_ref()) + .and_then(|b| b.context.clone()) + .expect("build override should have a context"); + assert_eq!( + build_context, ".", + "build override should preserve the original build context from docker-compose.yml" + ); + let runtime_override = files .iter() .find(|f| { @@ -3526,14 +3597,14 @@ ENV DOCKER_BUILDKIT=1 cap_add: Some(vec!["SYS_PTRACE".to_string()]), security_opt: Some(vec!["seccomp=unconfined".to_string()]), privileged: Some(true), - labels: Some(vec![ - "devcontainer.metadata=[{\"remoteUser\":\"vscode\"}]".to_string(), - "devcontainer.local_folder=/path/to/local/project".to_string(), - "devcontainer.config_file=/path/to/local/project/.devcontainer/devcontainer.json".to_string() - ]), + labels: Some(HashMap::from([ + ("devcontainer.metadata".to_string(), "[{\"remoteUser\":\"vscode\"}]".to_string()), + ("devcontainer.local_folder".to_string(), "/path/to/local/project".to_string()), + ("devcontainer.config_file".to_string(), "/path/to/local/project/.devcontainer/devcontainer.json".to_string()) + ])), volumes: vec![ MountDefinition { - source: "dind-var-lib-docker-42dad4b4ca7b8ced".to_string(), + source: Some("dind-var-lib-docker-42dad4b4ca7b8ced".to_string()), target: "/var/lib/docker".to_string(), mount_type: Some("volume".to_string()) } @@ -3545,10 +3616,26 @@ ENV DOCKER_BUILDKIT=1 "db".to_string(), DockerComposeService { ports: vec![ - "8083:8083".to_string(), - "5432:5432".to_string(), - "1234:1234".to_string(), - "8084:8084".to_string() 
+ DockerComposeServicePort { + target: "8083".to_string(), + published: "8083".to_string(), + ..Default::default() + }, + DockerComposeServicePort { + target: "5432".to_string(), + published: "5432".to_string(), + ..Default::default() + }, + DockerComposeServicePort { + target: "1234".to_string(), + published: "1234".to_string(), + ..Default::default() + }, + DockerComposeServicePort { + target: "8084".to_string(), + published: "8084".to_string(), + ..Default::default() + }, ], ..Default::default() }, @@ -4250,6 +4337,175 @@ chmod +x ./install.sh })) } + #[cfg(not(target_os = "windows"))] + #[gpui::test] + async fn test_spawns_devcontainer_with_plain_image(cx: &mut TestAppContext) { + cx.executor().allow_parking(); + env_logger::try_init().ok(); + let given_devcontainer_contents = r#" + { + "name": "cli-${devcontainerId}", + "image": "test_image:latest", + } + "#; + + let (test_dependencies, mut devcontainer_manifest) = + init_default_devcontainer_manifest(cx, given_devcontainer_contents) + .await + .unwrap(); + + devcontainer_manifest.parse_nonremote_vars().unwrap(); + + let _devcontainer_up = devcontainer_manifest.build_and_run().await.unwrap(); + + let files = test_dependencies.fs.files(); + let uid_dockerfile = files + .iter() + .find(|f| { + f.file_name() + .is_some_and(|s| s.display().to_string() == "updateUID.Dockerfile") + }) + .expect("to be found"); + let uid_dockerfile = test_dependencies.fs.load(uid_dockerfile).await.unwrap(); + + assert_eq!( + &uid_dockerfile, + r#"ARG BASE_IMAGE +FROM $BASE_IMAGE + +USER root + +ARG REMOTE_USER +ARG NEW_UID +ARG NEW_GID +SHELL ["/bin/sh", "-c"] +RUN eval $(sed -n "s/${REMOTE_USER}:[^:]*:\([^:]*\):\([^:]*\):[^:]*:\([^:]*\).*/OLD_UID=\1;OLD_GID=\2;HOME_FOLDER=\3/p" /etc/passwd); \ + eval $(sed -n "s/\([^:]*\):[^:]*:${NEW_UID}:.*/EXISTING_USER=\1/p" /etc/passwd); \ + eval $(sed -n "s/\([^:]*\):[^:]*:${NEW_GID}:.*/EXISTING_GROUP=\1/p" /etc/group); \ + if [ -z "$OLD_UID" ]; then \ + echo "Remote user not found in 
/etc/passwd ($REMOTE_USER)."; \ + elif [ "$OLD_UID" = "$NEW_UID" -a "$OLD_GID" = "$NEW_GID" ]; then \ + echo "UIDs and GIDs are the same ($NEW_UID:$NEW_GID)."; \ + elif [ "$OLD_UID" != "$NEW_UID" -a -n "$EXISTING_USER" ]; then \ + echo "User with UID exists ($EXISTING_USER=$NEW_UID)."; \ + else \ + if [ "$OLD_GID" != "$NEW_GID" -a -n "$EXISTING_GROUP" ]; then \ + FREE_GID=65532; \ + while grep -q ":[^:]*:${FREE_GID}:" /etc/group; do FREE_GID=$((FREE_GID - 1)); done; \ + echo "Reassigning group $EXISTING_GROUP from GID $NEW_GID to $FREE_GID."; \ + sed -i -e "s/\(${EXISTING_GROUP}:[^:]*:\)${NEW_GID}:/\1${FREE_GID}:/" /etc/group; \ + fi; \ + echo "Updating UID:GID from $OLD_UID:$OLD_GID to $NEW_UID:$NEW_GID."; \ + sed -i -e "s/\(${REMOTE_USER}:[^:]*:\)[^:]*:[^:]*/\1${NEW_UID}:${NEW_GID}/" /etc/passwd; \ + if [ "$OLD_GID" != "$NEW_GID" ]; then \ + sed -i -e "s/\([^:]*:[^:]*:\)${OLD_GID}:/\1${NEW_GID}:/" /etc/group; \ + fi; \ + chown -R $NEW_UID:$NEW_GID $HOME_FOLDER; \ + fi; + +ARG IMAGE_USER +USER $IMAGE_USER + +# Ensure that /etc/profile does not clobber the existing path +RUN sed -i -E 's/((^|\s)PATH=)([^\$]*)$/\1\${PATH:-\3}/g' /etc/profile || true +"# + ); + } + + #[cfg(not(target_os = "windows"))] + #[gpui::test] + async fn test_spawns_devcontainer_with_docker_compose_and_plain_image(cx: &mut TestAppContext) { + cx.executor().allow_parking(); + env_logger::try_init().ok(); + let given_devcontainer_contents = r#" + { + "name": "cli-${devcontainerId}", + "dockerComposeFile": "docker-compose-plain.yml", + "service": "app", + } + "#; + + let (test_dependencies, mut devcontainer_manifest) = + init_default_devcontainer_manifest(cx, given_devcontainer_contents) + .await + .unwrap(); + + test_dependencies + .fs + .atomic_write( + PathBuf::from(TEST_PROJECT_PATH).join(".devcontainer/docker-compose-plain.yml"), + r#" +services: + app: + image: test_image:latest + command: sleep infinity + volumes: + - ..:/workspace:cached + "# + .trim() + .to_string(), + ) + .await + 
.unwrap(); + + devcontainer_manifest.parse_nonremote_vars().unwrap(); + + let _devcontainer_up = devcontainer_manifest.build_and_run().await.unwrap(); + + let files = test_dependencies.fs.files(); + let uid_dockerfile = files + .iter() + .find(|f| { + f.file_name() + .is_some_and(|s| s.display().to_string() == "updateUID.Dockerfile") + }) + .expect("to be found"); + let uid_dockerfile = test_dependencies.fs.load(uid_dockerfile).await.unwrap(); + + assert_eq!( + &uid_dockerfile, + r#"ARG BASE_IMAGE +FROM $BASE_IMAGE + +USER root + +ARG REMOTE_USER +ARG NEW_UID +ARG NEW_GID +SHELL ["/bin/sh", "-c"] +RUN eval $(sed -n "s/${REMOTE_USER}:[^:]*:\([^:]*\):\([^:]*\):[^:]*:\([^:]*\).*/OLD_UID=\1;OLD_GID=\2;HOME_FOLDER=\3/p" /etc/passwd); \ + eval $(sed -n "s/\([^:]*\):[^:]*:${NEW_UID}:.*/EXISTING_USER=\1/p" /etc/passwd); \ + eval $(sed -n "s/\([^:]*\):[^:]*:${NEW_GID}:.*/EXISTING_GROUP=\1/p" /etc/group); \ + if [ -z "$OLD_UID" ]; then \ + echo "Remote user not found in /etc/passwd ($REMOTE_USER)."; \ + elif [ "$OLD_UID" = "$NEW_UID" -a "$OLD_GID" = "$NEW_GID" ]; then \ + echo "UIDs and GIDs are the same ($NEW_UID:$NEW_GID)."; \ + elif [ "$OLD_UID" != "$NEW_UID" -a -n "$EXISTING_USER" ]; then \ + echo "User with UID exists ($EXISTING_USER=$NEW_UID)."; \ + else \ + if [ "$OLD_GID" != "$NEW_GID" -a -n "$EXISTING_GROUP" ]; then \ + FREE_GID=65532; \ + while grep -q ":[^:]*:${FREE_GID}:" /etc/group; do FREE_GID=$((FREE_GID - 1)); done; \ + echo "Reassigning group $EXISTING_GROUP from GID $NEW_GID to $FREE_GID."; \ + sed -i -e "s/\(${EXISTING_GROUP}:[^:]*:\)${NEW_GID}:/\1${FREE_GID}:/" /etc/group; \ + fi; \ + echo "Updating UID:GID from $OLD_UID:$OLD_GID to $NEW_UID:$NEW_GID."; \ + sed -i -e "s/\(${REMOTE_USER}:[^:]*:\)[^:]*:[^:]*/\1${NEW_UID}:${NEW_GID}/" /etc/passwd; \ + if [ "$OLD_GID" != "$NEW_GID" ]; then \ + sed -i -e "s/\([^:]*:[^:]*:\)${OLD_GID}:/\1${NEW_GID}:/" /etc/group; \ + fi; \ + chown -R $NEW_UID:$NEW_GID $HOME_FOLDER; \ + fi; + +ARG IMAGE_USER +USER $IMAGE_USER + 
+# Ensure that /etc/profile does not clobber the existing path +RUN sed -i -E 's/((^|\s)PATH=)([^\$]*)$/\1\${PATH:-\3}/g' /etc/profile || true +"# + ); + } + pub(crate) struct RecordedExecCommand { pub(crate) _container_id: String, pub(crate) _remote_folder: String, @@ -4372,6 +4628,24 @@ chmod +x ./install.sh state: None, }); } + if id == "test_image:latest" { + return Ok(DockerInspect { + id: "sha256:610e6cfca95280188b021774f8cf69dd6f49bdb6eebc34c5ee2010f4d51cc104" + .to_string(), + config: DockerInspectConfig { + labels: DockerConfigLabels { + metadata: Some(vec![HashMap::from([( + "remoteUser".to_string(), + Value::String("node".to_string()), + )])]), + }, + env: Vec::new(), + image_user: Some("root".to_string()), + }, + mounts: None, + state: None, + }); + } Err(DevContainerError::DockerNotAvailable) } @@ -4398,7 +4672,7 @@ chmod +x ./install.sh additional_contexts: None, }), volumes: vec![MountDefinition { - source: "../..".to_string(), + source: Some("../..".to_string()), target: "/workspaces".to_string(), mount_type: Some("bind".to_string()), }], @@ -4411,7 +4685,7 @@ chmod +x ./install.sh DockerComposeService { image: Some("postgres:14.1".to_string()), volumes: vec![MountDefinition { - source: "postgres-data".to_string(), + source: Some("postgres-data".to_string()), target: "/var/lib/postgresql/data".to_string(), mount_type: Some("volume".to_string()), }], @@ -4426,6 +4700,25 @@ chmod +x ./install.sh )]), })); } + if config_files.len() == 1 + && config_files.get(0) + == Some(&PathBuf::from( + "/path/to/local/project/.devcontainer/docker-compose-plain.yml", + )) + { + return Ok(Some(DockerComposeConfig { + name: None, + services: HashMap::from([( + "app".to_string(), + DockerComposeService { + image: Some("test_image:latest".to_string()), + command: vec!["sleep".to_string(), "infinity".to_string()], + ..Default::default() + }, + )]), + ..Default::default() + })); + } Err(DevContainerError::DockerNotAvailable) } async fn docker_compose_build( diff --git 
a/crates/dev_container/src/docker.rs b/crates/dev_container/src/docker.rs index 9594eae3d0faf67669e7d1ad487925b77a54fc34..b913aea5fd068fdc75337284f05d99a2266dba05 100644 --- a/crates/dev_container/src/docker.rs +++ b/crates/dev_container/src/docker.rs @@ -1,7 +1,7 @@ use std::{collections::HashMap, path::PathBuf}; use async_trait::async_trait; -use serde::{Deserialize, Deserializer, Serialize}; +use serde::{Deserialize, Deserializer, Serialize, de}; use util::command::Command; use crate::{ @@ -31,9 +31,10 @@ pub(crate) struct DockerInspect { pub(crate) state: Option, } -#[derive(Debug, Clone, Deserialize, Serialize, Eq, PartialEq)] +#[derive(Debug, Clone, Deserialize, Serialize, Eq, PartialEq, Default)] pub(crate) struct DockerConfigLabels { #[serde( + default, rename = "devcontainer.metadata", deserialize_with = "deserialize_metadata" )] @@ -43,6 +44,7 @@ pub(crate) struct DockerConfigLabels { #[derive(Debug, Clone, Deserialize, Serialize, Eq, PartialEq)] #[serde(rename_all = "PascalCase")] pub(crate) struct DockerInspectConfig { + #[serde(default, deserialize_with = "deserialize_nullable_labels")] pub(crate) labels: DockerConfigLabels, #[serde(rename = "User")] pub(crate) image_user: Option, @@ -54,12 +56,11 @@ impl DockerInspectConfig { pub(crate) fn env_as_map(&self) -> Result, DevContainerError> { let mut map = HashMap::new(); for env_var in &self.env { - let parts: Vec<&str> = env_var.split("=").collect(); - if parts.len() != 2 { - log::error!("Unable to parse {env_var} into and environment key-value"); + let Some((key, value)) = env_var.split_once('=') else { + log::error!("Unable to parse {env_var} into an environment key-value"); return Err(DevContainerError::DevContainerParseFailed); - } - map.insert(parts[0].to_string(), parts[1].to_string()); + }; + map.insert(key.to_string(), value.to_string()); } Ok(map) } @@ -84,6 +85,43 @@ pub(crate) struct DockerComposeServiceBuild { pub(crate) additional_contexts: Option>, } +#[derive(Debug, Clone, Deserialize, 
Serialize, Eq, PartialEq, Default)] +pub(crate) struct DockerComposeServicePort { + #[serde(deserialize_with = "deserialize_string_or_int")] + pub(crate) target: String, + #[serde(deserialize_with = "deserialize_string_or_int")] + pub(crate) published: String, + #[serde(skip_serializing_if = "Option::is_none")] + pub(crate) mode: Option, + #[serde(skip_serializing_if = "Option::is_none")] + pub(crate) protocol: Option, + #[serde(skip_serializing_if = "Option::is_none")] + pub(crate) host_ip: Option, + #[serde(skip_serializing_if = "Option::is_none")] + pub(crate) app_protocol: Option, + #[serde(skip_serializing_if = "Option::is_none")] + pub(crate) name: Option, +} + +fn deserialize_string_or_int<'de, D>(deserializer: D) -> Result +where + D: serde::Deserializer<'de>, +{ + use serde::Deserialize; + + #[derive(Deserialize)] + #[serde(untagged)] + enum StringOrInt { + String(String), + Int(u32), + } + + match StringOrInt::deserialize(deserializer)? { + StringOrInt::String(s) => Ok(s), + StringOrInt::Int(b) => Ok(b.to_string()), + } +} + #[derive(Debug, Clone, Deserialize, Serialize, Eq, PartialEq, Default)] pub(crate) struct DockerComposeService { pub(crate) image: Option, @@ -93,19 +131,30 @@ pub(crate) struct DockerComposeService { pub(crate) cap_add: Option>, #[serde(skip_serializing_if = "Option::is_none")] pub(crate) security_opt: Option>, - #[serde(skip_serializing_if = "Option::is_none")] - pub(crate) labels: Option>, + #[serde( + skip_serializing_if = "Option::is_none", + default, + deserialize_with = "deserialize_labels" + )] + pub(crate) labels: Option>, #[serde(skip_serializing_if = "Option::is_none")] pub(crate) build: Option, #[serde(skip_serializing_if = "Option::is_none")] pub(crate) privileged: Option, + #[serde(default, skip_serializing_if = "Vec::is_empty")] pub(crate) volumes: Vec, #[serde(skip_serializing_if = "Option::is_none")] pub(crate) env_file: Option>, #[serde(default, skip_serializing_if = "Vec::is_empty")] - pub(crate) ports: Vec, + 
pub(crate) ports: Vec, #[serde(skip_serializing_if = "Option::is_none")] pub(crate) network_mode: Option, + #[serde( + default, + skip_serializing_if = "Vec::is_empty", + deserialize_with = "deserialize_nullable_vec" + )] + pub(crate) command: Vec, } #[derive(Debug, Clone, Deserialize, Serialize, Eq, PartialEq, Default)] @@ -118,6 +167,7 @@ pub(crate) struct DockerComposeConfig { #[serde(skip_serializing_if = "Option::is_none")] pub(crate) name: Option, pub(crate) services: HashMap, + #[serde(default)] pub(crate) volumes: HashMap, } @@ -355,6 +405,77 @@ pub(crate) trait DockerClient { fn docker_cli(&self) -> String; } +fn deserialize_labels<'de, D>(deserializer: D) -> Result>, D::Error> +where + D: Deserializer<'de>, +{ + struct LabelsVisitor; + + impl<'de> de::Visitor<'de> for LabelsVisitor { + type Value = Option>; + + fn expecting(&self, formatter: &mut std::fmt::Formatter) -> std::fmt::Result { + formatter.write_str("a sequence of strings or a map of string key-value pairs") + } + + fn visit_seq(self, seq: A) -> Result + where + A: de::SeqAccess<'de>, + { + let values = Vec::::deserialize(de::value::SeqAccessDeserializer::new(seq))?; + + Ok(Some( + values + .iter() + .filter_map(|v| { + let (key, value) = v.split_once('=')?; + Some((key.to_string(), value.to_string())) + }) + .collect(), + )) + } + + fn visit_map(self, map: M) -> Result + where + M: de::MapAccess<'de>, + { + HashMap::::deserialize(de::value::MapAccessDeserializer::new(map)) + .map(|v| Some(v)) + } + + fn visit_none(self) -> Result + where + E: de::Error, + { + Ok(None) + } + + fn visit_unit(self) -> Result + where + E: de::Error, + { + Ok(None) + } + } + + deserializer.deserialize_any(LabelsVisitor) +} + +fn deserialize_nullable_vec<'de, D, T>(deserializer: D) -> Result, D::Error> +where + D: Deserializer<'de>, + T: Deserialize<'de>, +{ + Option::>::deserialize(deserializer).map(|opt| opt.unwrap_or_default()) +} + +fn deserialize_nullable_labels<'de, D>(deserializer: D) -> Result +where + D: 
Deserializer<'de>, +{ + Option::::deserialize(deserializer).map(|opt| opt.unwrap_or_default()) +} + fn deserialize_metadata<'de, D>( deserializer: D, ) -> Result>>, D::Error> @@ -417,11 +538,51 @@ mod test { command_json::deserialize_json_output, devcontainer_json::MountDefinition, docker::{ - Docker, DockerComposeConfig, DockerComposeService, DockerComposeVolume, DockerInspect, - DockerPs, get_remote_dir_from_config, + Docker, DockerComposeConfig, DockerComposeService, DockerComposeServicePort, + DockerComposeVolume, DockerInspect, DockerPs, get_remote_dir_from_config, }, }; + #[test] + fn should_parse_simple_env_var() { + let config = super::DockerInspectConfig { + labels: super::DockerConfigLabels { metadata: None }, + image_user: None, + env: vec!["KEY=value".to_string()], + }; + + let map = config.env_as_map().unwrap(); + assert_eq!(map.get("KEY").unwrap(), "value"); + } + + #[test] + fn should_parse_env_var_with_equals_in_value() { + let config = super::DockerInspectConfig { + labels: super::DockerConfigLabels { metadata: None }, + image_user: None, + env: vec!["COMPLEX=key=val other>=1.0".to_string()], + }; + + let map = config.env_as_map().unwrap(); + assert_eq!(map.get("COMPLEX").unwrap(), "key=val other>=1.0"); + } + + #[test] + fn should_parse_simple_label() { + let json = r#"{"volumes": [], "labels": ["com.example.key=value"]}"#; + let service: DockerComposeService = serde_json_lenient::from_str(json).unwrap(); + let labels = service.labels.unwrap(); + assert_eq!(labels.get("com.example.key").unwrap(), "value"); + } + + #[test] + fn should_parse_label_with_equals_in_value() { + let json = r#"{"volumes": [], "labels": ["com.example.key=value=with=equals"]}"#; + let service: DockerComposeService = serde_json_lenient::from_str(json).unwrap(); + let labels = service.labels.unwrap(); + assert_eq!(labels.get("com.example.key").unwrap(), "value=with=equals"); + } + #[test] fn should_create_docker_inspect_command() { let docker = Docker::new("docker"); @@ 
-805,6 +966,22 @@ mod test { "POSTGRES_PORT": "5432", "POSTGRES_USER": "postgres" }, + "ports": [ + { + "target": "5443", + "published": "5442" + }, + { + "name": "custom port", + "protocol": "udp", + "host_ip": "127.0.0.1", + "app_protocol": "http", + "mode": "host", + "target": "8081", + "published": "8083" + + } + ], "image": "mcr.microsoft.com/devcontainers/rust:2-1-bookworm", "network_mode": "service:db", "volumes": [ @@ -860,15 +1037,33 @@ mod test { ( "app".to_string(), DockerComposeService { + command: vec!["sleep".to_string(), "infinity".to_string()], image: Some( "mcr.microsoft.com/devcontainers/rust:2-1-bookworm".to_string(), ), volumes: vec![MountDefinition { mount_type: Some("bind".to_string()), - source: "/path/to".to_string(), + source: Some("/path/to".to_string()), target: "/workspaces".to_string(), }], network_mode: Some("service:db".to_string()), + + ports: vec![ + DockerComposeServicePort { + target: "5443".to_string(), + published: "5442".to_string(), + ..Default::default() + }, + DockerComposeServicePort { + target: "8081".to_string(), + published: "8083".to_string(), + mode: Some("host".to_string()), + protocol: Some("udp".to_string()), + host_ip: Some("127.0.0.1".to_string()), + app_protocol: Some("http".to_string()), + name: Some("custom port".to_string()), + }, + ], ..Default::default() }, ), @@ -878,7 +1073,7 @@ mod test { image: Some("postgres:14.1".to_string()), volumes: vec![MountDefinition { mount_type: Some("volume".to_string()), - source: "postgres-data".to_string(), + source: Some("postgres-data".to_string()), target: "/var/lib/postgresql/data".to_string(), }], ..Default::default() @@ -895,4 +1090,175 @@ mod test { assert_eq!(docker_compose_config, expected_config); } + + #[test] + fn should_deserialize_compose_labels_as_map() { + let given_config = r#" + { + "name": "devcontainer", + "services": { + "app": { + "image": "node:22-alpine", + "volumes": [], + "labels": { + "com.example.test": "value", + "another.label": "another-value" 
+ } + } + } + } + "#; + + let config: DockerComposeConfig = serde_json_lenient::from_str(given_config).unwrap(); + let service = config.services.get("app").unwrap(); + let labels = service.labels.clone().unwrap(); + assert_eq!( + labels, + HashMap::from([ + ("another.label".to_string(), "another-value".to_string()), + ("com.example.test".to_string(), "value".to_string()) + ]) + ); + } + + #[test] + fn should_deserialize_compose_labels_as_array() { + let given_config = r#" + { + "name": "devcontainer", + "services": { + "app": { + "image": "node:22-alpine", + "volumes": [], + "labels": ["com.example.test=value"] + } + } + } + "#; + + let config: DockerComposeConfig = serde_json_lenient::from_str(given_config).unwrap(); + let service = config.services.get("app").unwrap(); + assert_eq!( + service.labels, + Some(HashMap::from([( + "com.example.test".to_string(), + "value".to_string() + )])) + ); + } + + #[test] + fn should_deserialize_compose_without_volumes() { + let given_config = r#" + { + "name": "devcontainer", + "services": { + "app": { + "image": "node:22-alpine", + "volumes": [] + } + } + } + "#; + + let config: DockerComposeConfig = serde_json_lenient::from_str(given_config).unwrap(); + assert!(config.volumes.is_empty()); + } + + #[test] + fn should_deserialize_compose_with_missing_volumes_field() { + let given_config = r#" + { + "name": "devcontainer", + "services": { + "sidecar": { + "image": "ubuntu:24.04" + } + } + } + "#; + + let config: DockerComposeConfig = serde_json_lenient::from_str(given_config).unwrap(); + let service = config.services.get("sidecar").unwrap(); + assert!(service.volumes.is_empty()); + } + + #[test] + fn should_deserialize_compose_volume_without_source() { + let given_config = r#" + { + "name": "devcontainer", + "services": { + "app": { + "image": "ubuntu:24.04", + "volumes": [ + { + "type": "tmpfs", + "target": "/tmp" + } + ] + } + } + } + "#; + + let config: DockerComposeConfig = serde_json_lenient::from_str(given_config).unwrap(); 
+ let service = config.services.get("app").unwrap(); + assert_eq!(service.volumes.len(), 1); + assert_eq!(service.volumes[0].source, None); + assert_eq!(service.volumes[0].target, "/tmp"); + assert_eq!(service.volumes[0].mount_type, Some("tmpfs".to_string())); + } + + #[test] + fn should_deserialize_inspect_without_labels() { + let given_config = r#" + { + "Id": "sha256:abc123", + "Config": { + "Env": ["PATH=/usr/bin"], + "Cmd": ["node"], + "WorkingDir": "/" + } + } + "#; + + let inspect: DockerInspect = serde_json_lenient::from_str(given_config).unwrap(); + assert!(inspect.config.labels.metadata.is_none()); + assert!(inspect.config.image_user.is_none()); + } + + #[test] + fn should_deserialize_inspect_with_null_labels() { + let given_config = r#" + { + "Id": "sha256:abc123", + "Config": { + "Labels": null, + "Env": ["PATH=/usr/bin"] + } + } + "#; + + let inspect: DockerInspect = serde_json_lenient::from_str(given_config).unwrap(); + assert!(inspect.config.labels.metadata.is_none()); + } + + #[test] + fn should_deserialize_inspect_with_labels_but_no_metadata() { + let given_config = r#" + { + "Id": "sha256:abc123", + "Config": { + "Labels": { + "com.example.test": "value" + }, + "Env": ["PATH=/usr/bin"] + } + } + "#; + + let inspect: DockerInspect = serde_json_lenient::from_str(given_config).unwrap(); + assert!(inspect.config.labels.metadata.is_none()); + } } diff --git a/crates/diagnostics/src/buffer_diagnostics.rs b/crates/diagnostics/src/buffer_diagnostics.rs index 56924585011921ddebc96b971fd15c3abd151a85..040aeae4742e18449523cbc255b4370814c1f8d7 100644 --- a/crates/diagnostics/src/buffer_diagnostics.rs +++ b/crates/diagnostics/src/buffer_diagnostics.rs @@ -24,6 +24,7 @@ use settings::Settings; use std::{ any::{Any, TypeId}, cmp::{self, Ordering}, + ops::Range, sync::Arc, }; use text::{Anchor, BufferSnapshot, OffsetRangeExt}; @@ -480,25 +481,35 @@ impl BufferDiagnosticsEditor { }) }); - let (anchor_ranges, _) = - buffer_diagnostics_editor - .multibuffer - 
.update(cx, |multibuffer, cx| { - let excerpt_ranges = excerpt_ranges - .into_iter() - .map(|range| ExcerptRange { - context: range.context.to_point(&buffer_snapshot), - primary: range.primary.to_point(&buffer_snapshot), - }) - .collect(); - multibuffer.set_excerpt_ranges_for_path( - PathKey::for_buffer(&buffer, cx), - buffer.clone(), - &buffer_snapshot, - excerpt_ranges, - cx, - ) - }); + let excerpt_ranges: Vec<_> = excerpt_ranges + .into_iter() + .map(|range| ExcerptRange { + context: range.context.to_point(&buffer_snapshot), + primary: range.primary.to_point(&buffer_snapshot), + }) + .collect(); + buffer_diagnostics_editor + .multibuffer + .update(cx, |multibuffer, cx| { + multibuffer.set_excerpt_ranges_for_path( + PathKey::for_buffer(&buffer, cx), + buffer.clone(), + &buffer_snapshot, + excerpt_ranges.clone(), + cx, + ) + }); + let multibuffer_snapshot = + buffer_diagnostics_editor.multibuffer.read(cx).snapshot(cx); + let anchor_ranges: Vec> = excerpt_ranges + .into_iter() + .filter_map(|range| { + let text_range = buffer_snapshot.anchor_range_inside(range.primary); + let start = multibuffer_snapshot.anchor_in_buffer(text_range.start)?; + let end = multibuffer_snapshot.anchor_in_buffer(text_range.end)?; + Some(start..end) + }) + .collect(); if was_empty { if let Some(anchor_range) = anchor_ranges.first() { diff --git a/crates/diagnostics/src/diagnostic_renderer.rs b/crates/diagnostics/src/diagnostic_renderer.rs index 62b7f4eadf322da1c57a9f1da60b412d7b0dcd68..eaf414560845ea326fc508fe19d71fb01ebc1f32 100644 --- a/crates/diagnostics/src/diagnostic_renderer.rs +++ b/crates/diagnostics/src/diagnostic_renderer.rs @@ -10,7 +10,7 @@ use language::{BufferId, Diagnostic, DiagnosticEntryRef, LanguageRegistry}; use lsp::DiagnosticSeverity; use markdown::{CopyButtonVisibility, Markdown, MarkdownElement}; use settings::Settings; -use text::{AnchorRangeExt, Point}; +use text::Point; use theme_settings::ThemeSettings; use ui::{CopyButton, prelude::*}; use util::maybe; @@ 
-289,23 +289,12 @@ impl DiagnosticBlock { .nth(ix) { let multibuffer = editor.buffer().read(cx); - let Some(snapshot) = multibuffer - .buffer(buffer_id) - .map(|entity| entity.read(cx).snapshot()) - else { + if let Some(anchor_range) = multibuffer + .snapshot(cx) + .buffer_anchor_range_to_anchor_range(diagnostic.range) + { + Self::jump_to(editor, anchor_range, window, cx); return; - }; - - for (excerpt_id, _, range) in multibuffer.excerpts_for_buffer(buffer_id, cx) { - if range.context.overlaps(&diagnostic.range, &snapshot) { - Self::jump_to( - editor, - Anchor::range_in_buffer(excerpt_id, diagnostic.range), - window, - cx, - ); - return; - } } } } else if let Some(diagnostic) = editor diff --git a/crates/diagnostics/src/diagnostics.rs b/crates/diagnostics/src/diagnostics.rs index b200d01669a90c1e439338b9b01118cce8b8bb0c..dc3708e9307032a43b062289764656fa05b20d46 100644 --- a/crates/diagnostics/src/diagnostics.rs +++ b/crates/diagnostics/src/diagnostics.rs @@ -12,7 +12,7 @@ use buffer_diagnostics::BufferDiagnosticsEditor; use collections::{BTreeSet, HashMap, HashSet}; use diagnostic_renderer::DiagnosticBlock; use editor::{ - Editor, EditorEvent, ExcerptRange, MultiBuffer, PathKey, + Anchor, Editor, EditorEvent, ExcerptRange, MultiBuffer, PathKey, display_map::{BlockPlacement, BlockProperties, BlockStyle, CustomBlockId}, multibuffer_context_lines, }; @@ -301,17 +301,21 @@ impl ProjectDiagnosticsEditor { let snapshot = self .editor .update(cx, |editor, cx| editor.display_snapshot(cx)); - let buffer = self.multibuffer.read(cx); - let buffer_ids = buffer.all_buffer_ids(); let selected_buffers = self.editor.update(cx, |editor, _| { editor .selections .all_anchors(&snapshot) .iter() - .filter_map(|anchor| anchor.start.text_anchor.buffer_id) + .filter_map(|anchor| { + Some(snapshot.anchor_to_buffer_anchor(anchor.start)?.0.buffer_id) + }) .collect::>() }); - for buffer_id in buffer_ids { + for buffer_id in snapshot + .excerpts() + .map(|excerpt| 
excerpt.context.start.buffer_id) + .dedup() + { if retain_selections && selected_buffers.contains(&buffer_id) { continue; } @@ -329,7 +333,7 @@ impl ProjectDiagnosticsEditor { continue; } self.multibuffer.update(cx, |b, cx| { - b.remove_excerpts_for_path(PathKey::for_buffer(&buffer, cx), cx); + b.remove_excerpts(PathKey::for_buffer(&buffer, cx), cx); }); } } @@ -581,9 +585,8 @@ impl ProjectDiagnosticsEditor { match retain_excerpts { RetainExcerpts::Dirty if !is_dirty => Vec::new(), RetainExcerpts::All | RetainExcerpts::Dirty => multi_buffer - .excerpts_for_buffer(buffer_id, cx) - .into_iter() - .map(|(_, _, range)| range) + .snapshot(cx) + .excerpts_for_buffer(buffer_id) .sorted_by(|a, b| cmp_excerpts(&buffer_snapshot, a, b)) .collect(), } @@ -621,22 +624,34 @@ impl ProjectDiagnosticsEditor { }); }) } - let (anchor_ranges, _) = this.multibuffer.update(cx, |multi_buffer, cx| { - let excerpt_ranges = excerpt_ranges - .into_iter() - .map(|range| ExcerptRange { - context: range.context.to_point(&buffer_snapshot), - primary: range.primary.to_point(&buffer_snapshot), - }) - .collect(); + let buffer_snapshot = buffer.read(cx).snapshot(); + let excerpt_ranges: Vec<_> = excerpt_ranges + .into_iter() + .map(|range| ExcerptRange { + context: range.context.to_point(&buffer_snapshot), + primary: range.primary.to_point(&buffer_snapshot), + }) + .collect(); + // TODO(cole): maybe should use the nonshrinking API? 
+ this.multibuffer.update(cx, |multi_buffer, cx| { multi_buffer.set_excerpt_ranges_for_path( PathKey::for_buffer(&buffer, cx), buffer.clone(), &buffer_snapshot, - excerpt_ranges, + excerpt_ranges.clone(), cx, ) }); + let multibuffer_snapshot = this.multibuffer.read(cx).snapshot(cx); + let anchor_ranges: Vec> = excerpt_ranges + .into_iter() + .filter_map(|range| { + let text_range = buffer_snapshot.anchor_range_inside(range.primary); + let start = multibuffer_snapshot.anchor_in_buffer(text_range.start)?; + let end = multibuffer_snapshot.anchor_in_buffer(text_range.end)?; + Some(start..end) + }) + .collect(); #[cfg(test)] let cloned_blocks = result_blocks.clone(); diff --git a/crates/edit_prediction/Cargo.toml b/crates/edit_prediction/Cargo.toml index 75a589dea8f9c7fefe7bf13400cbdde54bf90bf1..eabb1641fd4fbec7b2f8ef0ba399a8fe9600dfa3 100644 --- a/crates/edit_prediction/Cargo.toml +++ b/crates/edit_prediction/Cargo.toml @@ -26,6 +26,7 @@ cloud_llm_client.workspace = true collections.workspace = true copilot.workspace = true copilot_ui.workspace = true +credentials_provider.workspace = true db.workspace = true edit_prediction_types.workspace = true edit_prediction_context.workspace = true @@ -65,6 +66,7 @@ uuid.workspace = true workspace.workspace = true worktree.workspace = true zed_actions.workspace = true +zed_credentials_provider.workspace = true zeta_prompt.workspace = true zstd.workspace = true diff --git a/crates/edit_prediction/src/capture_example.rs b/crates/edit_prediction/src/capture_example.rs index d21df7868162d279cb18aeea3ef04d4ea9d7be7f..9463456132ce391b54aca8327cb6f900d81481d6 100644 --- a/crates/edit_prediction/src/capture_example.rs +++ b/crates/edit_prediction/src/capture_example.rs @@ -258,6 +258,7 @@ fn generate_timestamp_name() -> String { mod tests { use super::*; use crate::EditPredictionStore; + use client::RefreshLlmTokenListener; use client::{Client, UserStore}; use clock::FakeSystemClock; use gpui::{AppContext as _, TestAppContext, 
http_client::FakeHttpClient}; @@ -414,7 +415,7 @@ mod tests { capture_example( project.clone(), buffer.clone(), - Anchor::MIN, + Anchor::min_for_buffer(buffer.read(cx).remote_id()), events, true, cx, @@ -548,7 +549,8 @@ mod tests { let http_client = FakeHttpClient::with_404_response(); let client = Client::new(Arc::new(FakeSystemClock::new()), http_client, cx); let user_store = cx.new(|cx| UserStore::new(client.clone(), cx)); - language_model::init(user_store.clone(), client.clone(), cx); + language_model::init(cx); + RefreshLlmTokenListener::register(client.clone(), user_store.clone(), cx); EditPredictionStore::global(&client, &user_store, cx); }) } diff --git a/crates/edit_prediction/src/edit_prediction.rs b/crates/edit_prediction/src/edit_prediction.rs index 3a66f712e31d7853bede21ab96ca6c7e92bea967..280427df006b510e1854ffb40cd7f995fcd9fdc6 100644 --- a/crates/edit_prediction/src/edit_prediction.rs +++ b/crates/edit_prediction/src/edit_prediction.rs @@ -1,5 +1,5 @@ use anyhow::Result; -use client::{Client, EditPredictionUsage, UserStore}; +use client::{Client, EditPredictionUsage, NeedsLlmTokenRefresh, UserStore, global_llm_token}; use cloud_api_types::{OrganizationId, SubmitEditPredictionFeedbackBody}; use cloud_llm_client::predict_edits_v3::{ PredictEditsV3Request, PredictEditsV3Response, RawCompletionRequest, RawCompletionResponse, @@ -11,6 +11,7 @@ use cloud_llm_client::{ }; use collections::{HashMap, HashSet}; use copilot::{Copilot, Reinstall, SignIn, SignOut}; +use credentials_provider::CredentialsProvider; use db::kvp::{Dismissable, KeyValueStore}; use edit_prediction_context::{RelatedExcerptStore, RelatedExcerptStoreEvent, RelatedFile}; use feature_flags::{FeatureFlag, FeatureFlagAppExt as _}; @@ -30,7 +31,7 @@ use heapless::Vec as ArrayVec; use language::language_settings::all_language_settings; use language::{Anchor, Buffer, File, Point, TextBufferSnapshot, ToOffset, ToPoint}; use language::{BufferSnapshot, OffsetRangeExt}; -use 
language_model::{LlmApiToken, NeedsLlmTokenRefresh}; +use language_model::LlmApiToken; use project::{DisableAiSettings, Project, ProjectPath, WorktreeId}; use release_channel::AppVersion; use semver::Version; @@ -150,6 +151,7 @@ pub struct EditPredictionStore { rated_predictions: HashSet, #[cfg(test)] settled_event_callback: Option>, + credentials_provider: Arc, } pub(crate) struct EditPredictionRejectionPayload { @@ -746,7 +748,7 @@ impl EditPredictionStore { pub fn new(client: Arc, user_store: Entity, cx: &mut Context) -> Self { let data_collection_choice = Self::load_data_collection_choice(cx); - let llm_token = LlmApiToken::global(cx); + let llm_token = global_llm_token(cx); let (reject_tx, reject_rx) = mpsc::unbounded(); cx.background_spawn({ @@ -787,6 +789,8 @@ impl EditPredictionStore { .log_err(); }); + let credentials_provider = zed_credentials_provider::global(cx); + let this = Self { projects: HashMap::default(), client, @@ -807,6 +811,8 @@ impl EditPredictionStore { shown_predictions: Default::default(), #[cfg(test)] settled_event_callback: None, + + credentials_provider, }; this @@ -871,7 +877,9 @@ impl EditPredictionStore { let experiments = cx .background_spawn(async move { let http_client = client.http_client(); - let token = llm_token.acquire(&client, organization_id).await?; + let token = client + .acquire_llm_token(&llm_token, organization_id.clone()) + .await?; let url = http_client.build_zed_llm_url("/edit_prediction_experiments", &[])?; let request = http_client::Request::builder() .method(Method::GET) @@ -1676,7 +1684,7 @@ impl EditPredictionStore { buffer.pending_predictions.push(PendingSettledPrediction { request_id: request_id, editable_anchor_range: edited_buffer_snapshot - .anchor_range_around(editable_offset_range), + .anchor_range_inside(editable_offset_range), example, e2e_latency, enqueued_at: now, @@ -2315,7 +2323,10 @@ impl EditPredictionStore { zeta::request_prediction_with_zeta(self, inputs, capture_data, cx) } 
EditPredictionModel::Fim { format } => fim::request_prediction(inputs, format, cx), - EditPredictionModel::Mercury => self.mercury.request_prediction(inputs, cx), + EditPredictionModel::Mercury => { + self.mercury + .request_prediction(inputs, self.credentials_provider.clone(), cx) + } }; cx.spawn(async move |this, cx| { @@ -2351,7 +2362,10 @@ impl EditPredictionStore { cx: &mut AsyncApp, ) -> Result, language::Anchor)>> { let collaborator_cursor_rows: Vec = active_buffer_snapshot - .selections_in_range(Anchor::MIN..Anchor::MAX, false) + .selections_in_range( + Anchor::min_max_range_for_buffer(active_buffer_snapshot.remote_id()), + false, + ) .flat_map(|(_, _, _, selections)| { selections.map(|s| s.head().to_point(active_buffer_snapshot).row) }) @@ -2427,7 +2441,10 @@ impl EditPredictionStore { candidate_buffer.read_with(cx, |buffer, _cx| { let snapshot = buffer.snapshot(); let has_collaborators = snapshot - .selections_in_range(Anchor::MIN..Anchor::MAX, false) + .selections_in_range( + Anchor::min_max_range_for_buffer(snapshot.remote_id()), + false, + ) .next() .is_some(); let position = buffer @@ -2530,12 +2547,15 @@ impl EditPredictionStore { Res: DeserializeOwned, { let http_client = client.http_client(); - let mut token = if require_auth { - Some(llm_token.acquire(&client, organization_id.clone()).await?) 
+ Some( + client + .acquire_llm_token(&llm_token, organization_id.clone()) + .await?, + ) } else { - llm_token - .acquire(&client, organization_id.clone()) + client + .acquire_llm_token(&llm_token, organization_id.clone()) .await .ok() }; @@ -2579,7 +2599,11 @@ impl EditPredictionStore { return Ok((serde_json::from_slice(&body)?, usage)); } else if !did_retry && token.is_some() && response.needs_llm_token_refresh() { did_retry = true; - token = Some(llm_token.refresh(&client, organization_id.clone()).await?); + token = Some( + client + .refresh_llm_token(&llm_token, organization_id.clone()) + .await?, + ); } else { let mut body = String::new(); response.body_mut().read_to_string(&mut body).await?; @@ -2761,7 +2785,7 @@ fn collaborator_edit_overlaps_locality_region( (position..position).to_point(snapshot), COLLABORATOR_EDIT_LOCALITY_CONTEXT_TOKENS, ); - let locality_anchor_range = snapshot.anchor_range_around(locality_point_range); + let locality_anchor_range = snapshot.anchor_range_inside(locality_point_range); edit_range.overlaps(&locality_anchor_range, snapshot) } diff --git a/crates/edit_prediction/src/edit_prediction_tests.rs b/crates/edit_prediction/src/edit_prediction_tests.rs index 6fe61338e764a40aec9cf6f3191f1191bafe9200..ea7233cd976148f5eb726730635e0efaf6ceef86 100644 --- a/crates/edit_prediction/src/edit_prediction_tests.rs +++ b/crates/edit_prediction/src/edit_prediction_tests.rs @@ -1,6 +1,6 @@ use super::*; use crate::udiff::apply_diff_to_string; -use client::{UserStore, test::FakeServer}; +use client::{RefreshLlmTokenListener, UserStore, test::FakeServer}; use clock::FakeSystemClock; use clock::ReplicaId; use cloud_api_types::{CreateLlmTokenResponse, LlmToken}; @@ -23,7 +23,7 @@ use language::{ Anchor, Buffer, Capability, CursorShape, Diagnostic, DiagnosticEntry, DiagnosticSet, DiagnosticSeverity, Operation, Point, Selection, SelectionGoal, }; -use language_model::RefreshLlmTokenListener; + use lsp::LanguageServerId; use parking_lot::Mutex; use 
pretty_assertions::{assert_eq, assert_matches}; @@ -2439,7 +2439,8 @@ fn init_test_with_fake_client( client.cloud_client().set_credentials(1, "test".into()); let user_store = cx.new(|cx| UserStore::new(client.clone(), cx)); - language_model::init(user_store.clone(), client.clone(), cx); + language_model::init(cx); + RefreshLlmTokenListener::register(client.clone(), user_store.clone(), cx); let ep_store = EditPredictionStore::global(&client, &user_store, cx); ( @@ -2706,6 +2707,65 @@ async fn test_edit_prediction_no_spurious_trailing_newline(cx: &mut TestAppConte }); } +#[gpui::test] +async fn test_v3_prediction_strips_cursor_marker_from_edit_text(cx: &mut TestAppContext) { + let (ep_store, mut requests) = init_test_with_fake_client(cx); + let fs = FakeFs::new(cx.executor()); + + fs.insert_tree( + "/root", + json!({ + "foo.txt": "hello" + }), + ) + .await; + let project = Project::test(fs, vec![path!("/root").as_ref()], cx).await; + + let buffer = project + .update(cx, |project, cx| { + let path = project + .find_project_path(path!("root/foo.txt"), cx) + .unwrap(); + project.open_buffer(path, cx) + }) + .await + .unwrap(); + + let snapshot = buffer.read_with(cx, |buffer, _cx| buffer.snapshot()); + let position = snapshot.anchor_before(language::Point::new(0, 5)); + + ep_store.update(cx, |ep_store, cx| { + ep_store.refresh_prediction_from_buffer(project.clone(), buffer.clone(), position, cx); + }); + + let (request, respond_tx) = requests.predict.next().await.unwrap(); + let excerpt_length = request.input.cursor_excerpt.len(); + respond_tx + .send(PredictEditsV3Response { + request_id: Uuid::new_v4().to_string(), + output: "hello<|user_cursor|> world".to_string(), + editable_range: 0..excerpt_length, + model_version: None, + }) + .unwrap(); + + cx.run_until_parked(); + + ep_store.update(cx, |ep_store, cx| { + let prediction = ep_store + .prediction_at(&buffer, None, &project, cx) + .expect("should have prediction"); + let snapshot = buffer.read(cx).snapshot(); + let 
edits: Vec<_> = prediction + .edits + .iter() + .map(|(range, text)| (range.to_offset(&snapshot), text.clone())) + .collect(); + + assert_eq!(edits, vec![(5..5, " world".into())]); + }); +} + fn init_test(cx: &mut TestAppContext) { cx.update(|cx| { let settings_store = SettingsStore::test(cx); @@ -2891,7 +2951,7 @@ async fn test_unauthenticated_without_custom_url_blocks_prediction_impl(cx: &mut cx.update(|cx| client::Client::new(Arc::new(FakeSystemClock::new()), http_client, cx)); let user_store = cx.update(|cx| cx.new(|cx| client::UserStore::new(client.clone(), cx))); cx.update(|cx| { - language_model::RefreshLlmTokenListener::register(client.clone(), user_store.clone(), cx); + RefreshLlmTokenListener::register(client.clone(), user_store.clone(), cx); }); let ep_store = cx.new(|cx| EditPredictionStore::new(client, project.read(cx).user_store(), cx)); diff --git a/crates/edit_prediction/src/example_spec.rs b/crates/edit_prediction/src/example_spec.rs index 4486cde22c3429568bf29f152d0f5f2ded59e8f4..a7da51173eefbcdb9e014f7dcca917e6ebebebf5 100644 --- a/crates/edit_prediction/src/example_spec.rs +++ b/crates/edit_prediction/src/example_spec.rs @@ -1,10 +1,11 @@ -use crate::udiff::DiffLine; use anyhow::{Context as _, Result}; use serde::{Deserialize, Serialize}; use std::{borrow::Cow, fmt::Write as _, mem, path::Path, sync::Arc}; use telemetry_events::EditPredictionRating; -pub const CURSOR_POSITION_MARKER: &str = "[CURSOR_POSITION]"; +pub use zeta_prompt::udiff::{ + CURSOR_POSITION_MARKER, encode_cursor_in_patch, extract_cursor_from_patch, +}; pub const INLINE_CURSOR_MARKER: &str = "<|user_cursor|>"; /// Maximum cursor file size to capture (64KB). @@ -12,64 +13,6 @@ pub const INLINE_CURSOR_MARKER: &str = "<|user_cursor|>"; /// falling back to git-based loading. pub const MAX_CURSOR_FILE_SIZE: usize = 64 * 1024; -/// Encodes a cursor position into a diff patch by adding a comment line with a caret -/// pointing to the cursor column. 
-/// -/// The cursor offset is relative to the start of the new text content (additions and context lines). -/// Returns the patch with cursor marker comment lines inserted after the relevant addition line. -pub fn encode_cursor_in_patch(patch: &str, cursor_offset: Option) -> String { - let Some(cursor_offset) = cursor_offset else { - return patch.to_string(); - }; - - let mut result = String::new(); - let mut line_start_offset = 0usize; - - for line in patch.lines() { - if matches!( - DiffLine::parse(line), - DiffLine::Garbage(content) - if content.starts_with('#') && content.contains(CURSOR_POSITION_MARKER) - ) { - continue; - } - - if !result.is_empty() { - result.push('\n'); - } - result.push_str(line); - - match DiffLine::parse(line) { - DiffLine::Addition(content) => { - let line_end_offset = line_start_offset + content.len(); - - if cursor_offset >= line_start_offset && cursor_offset <= line_end_offset { - let cursor_column = cursor_offset - line_start_offset; - - result.push('\n'); - result.push('#'); - for _ in 0..cursor_column { - result.push(' '); - } - write!(result, "^{}", CURSOR_POSITION_MARKER).unwrap(); - } - - line_start_offset = line_end_offset + 1; - } - DiffLine::Context(content) => { - line_start_offset += content.len() + 1; - } - _ => {} - } - } - - if patch.ends_with('\n') { - result.push('\n'); - } - - result -} - #[derive(Clone, Debug, PartialEq, Hash, Serialize, Deserialize)] pub struct ExampleSpec { #[serde(default)] @@ -509,53 +452,7 @@ impl ExampleSpec { pub fn expected_patches_with_cursor_positions(&self) -> Vec<(String, Option)> { self.expected_patches .iter() - .map(|patch| { - let mut clean_patch = String::new(); - let mut cursor_offset: Option = None; - let mut line_start_offset = 0usize; - let mut prev_line_start_offset = 0usize; - - for line in patch.lines() { - let diff_line = DiffLine::parse(line); - - match &diff_line { - DiffLine::Garbage(content) - if content.starts_with('#') - && content.contains(CURSOR_POSITION_MARKER) => 
- { - let caret_column = if let Some(caret_pos) = content.find('^') { - caret_pos - } else if let Some(_) = content.find('<') { - 0 - } else { - continue; - }; - let cursor_column = caret_column.saturating_sub('#'.len_utf8()); - cursor_offset = Some(prev_line_start_offset + cursor_column); - } - _ => { - if !clean_patch.is_empty() { - clean_patch.push('\n'); - } - clean_patch.push_str(line); - - match diff_line { - DiffLine::Addition(content) | DiffLine::Context(content) => { - prev_line_start_offset = line_start_offset; - line_start_offset += content.len() + 1; - } - _ => {} - } - } - } - } - - if patch.ends_with('\n') && !clean_patch.is_empty() { - clean_patch.push('\n'); - } - - (clean_patch, cursor_offset) - }) + .map(|patch| extract_cursor_from_patch(patch)) .collect() } diff --git a/crates/edit_prediction/src/license_detection.rs b/crates/edit_prediction/src/license_detection.rs index 6f701d13a9d4d915bbfbc2442ea5643afac30ef4..88edfc306ebca21076908b3c05f7cf2837b19209 100644 --- a/crates/edit_prediction/src/license_detection.rs +++ b/crates/edit_prediction/src/license_detection.rs @@ -21,14 +21,23 @@ use worktree::ChildEntriesOptions; static LICENSE_FILE_NAME_REGEX: LazyLock = LazyLock::new(|| { regex::bytes::RegexBuilder::new( "^ \ - (?: license | licence)? \ - (?: [\\-._]? \ + (?: \ + (?: license | licence) \ + (?: [\\-._]? \ + (?: apache (?: [\\-._] (?: 2.0 | 2 ))? | \ + 0? bsd (?: [\\-._] [0123])? (?: [\\-._] clause)? | \ + isc | \ + mit | \ + upl | \ + zlib))? \ + | \ (?: apache (?: [\\-._] (?: 2.0 | 2 ))? | \ 0? bsd (?: [\\-._] [0123])? (?: [\\-._] clause)? | \ isc | \ mit | \ upl | \ - zlib))? \ + zlib) \ + ) \ (?: [\\-._]? (?: license | licence))? \ (?: \\.txt | \\.md)? 
\ $", @@ -310,6 +319,7 @@ impl LicenseDetectionWatcher { } worktree::Event::DeletedEntry(_) | worktree::Event::UpdatedGitRepositories(_) + | worktree::Event::UpdatedRootRepoCommonDir | worktree::Event::Deleted => {} }); @@ -350,6 +360,9 @@ impl LicenseDetectionWatcher { return None; }; let metadata = fs.metadata(&abs_path).await.log_err()??; + if metadata.is_dir { + return None; + } if metadata.len > LICENSE_PATTERNS.approximate_max_length as u64 { log::debug!( "`{abs_path:?}` license file was skipped \ @@ -697,6 +710,7 @@ mod tests { assert!(LICENSE_FILE_NAME_REGEX.is_match(b"licence-upl.txt")); // Test non-matching patterns + assert!(!LICENSE_FILE_NAME_REGEX.is_match(b"")); assert!(!LICENSE_FILE_NAME_REGEX.is_match(b"COPYING")); assert!(!LICENSE_FILE_NAME_REGEX.is_match(b"LICENSE.html")); assert!(!LICENSE_FILE_NAME_REGEX.is_match(b"MYLICENSE")); diff --git a/crates/edit_prediction/src/mercury.rs b/crates/edit_prediction/src/mercury.rs index df47a38062344512a784c6d2feb563e9848afb27..155fd449904687081da0a9eae3d4731863f02254 100644 --- a/crates/edit_prediction/src/mercury.rs +++ b/crates/edit_prediction/src/mercury.rs @@ -5,6 +5,7 @@ use crate::{ }; use anyhow::{Context as _, Result}; use cloud_llm_client::EditPredictionRejectReason; +use credentials_provider::CredentialsProvider; use futures::AsyncReadExt as _; use gpui::{ App, AppContext as _, Context, Entity, Global, SharedString, Task, @@ -51,10 +52,11 @@ impl Mercury { debug_tx, .. 
}: EditPredictionModelInput, + credentials_provider: Arc, cx: &mut Context, ) -> Task>> { self.api_token.update(cx, |key_state, cx| { - _ = key_state.load_if_needed(MERCURY_CREDENTIALS_URL, |s| s, cx); + _ = key_state.load_if_needed(MERCURY_CREDENTIALS_URL, |s| s, credentials_provider, cx); }); let Some(api_token) = self.api_token.read(cx).key(&MERCURY_CREDENTIALS_URL) else { return Task::ready(Ok(None)); @@ -387,8 +389,9 @@ pub fn mercury_api_token(cx: &mut App) -> Entity { } pub fn load_mercury_api_token(cx: &mut App) -> Task> { + let credentials_provider = zed_credentials_provider::global(cx); mercury_api_token(cx).update(cx, |key_state, cx| { - key_state.load_if_needed(MERCURY_CREDENTIALS_URL, |s| s, cx) + key_state.load_if_needed(MERCURY_CREDENTIALS_URL, |s| s, credentials_provider, cx) }) } diff --git a/crates/edit_prediction/src/open_ai_compatible.rs b/crates/edit_prediction/src/open_ai_compatible.rs index ca378ba1fd0bc9bdbb3e85c7610e1b94c1be388f..9a11164822857d78c2fe0d9245faeb5d4f7400a0 100644 --- a/crates/edit_prediction/src/open_ai_compatible.rs +++ b/crates/edit_prediction/src/open_ai_compatible.rs @@ -42,9 +42,10 @@ pub fn open_ai_compatible_api_token(cx: &mut App) -> Entity { pub fn load_open_ai_compatible_api_token( cx: &mut App, ) -> Task> { + let credentials_provider = zed_credentials_provider::global(cx); let api_url = open_ai_compatible_api_url(cx); open_ai_compatible_api_token(cx).update(cx, |key_state, cx| { - key_state.load_if_needed(api_url, |s| s, cx) + key_state.load_if_needed(api_url, |s| s, credentials_provider, cx) }) } diff --git a/crates/edit_prediction/src/udiff.rs b/crates/edit_prediction/src/udiff.rs index 14be1991d34e985067f5ad8729fd7ac8485211db..b2468755a8979f28635aa5e91cacf1490dc1ccd8 100644 --- a/crates/edit_prediction/src/udiff.rs +++ b/crates/edit_prediction/src/udiff.rs @@ -1,11 +1,4 @@ -use std::{ - borrow::Cow, - fmt::{Debug, Display, Write}, - mem, - ops::Range, - path::{Path, PathBuf}, - sync::Arc, -}; +use std::{mem, 
ops::Range, path::Path, path::PathBuf, sync::Arc}; use anyhow::{Context as _, Result, anyhow}; use collections::{HashMap, hash_map::Entry}; @@ -15,6 +8,14 @@ use postage::stream::Stream as _; use project::Project; use util::{paths::PathStyle, rel_path::RelPath}; use worktree::Worktree; +use zeta_prompt::udiff::{ + DiffEvent, DiffParser, FileStatus, Hunk, disambiguate_by_line_number, find_context_candidates, +}; + +pub use zeta_prompt::udiff::{ + DiffLine, HunkLocation, apply_diff_to_string, apply_diff_to_string_with_hunk_offset, + strip_diff_metadata, strip_diff_path_prefix, +}; #[derive(Clone, Debug)] pub struct OpenedBuffers(HashMap>); @@ -54,7 +55,6 @@ pub async fn apply_diff( let mut included_files: HashMap> = HashMap::default(); - let ranges = [Anchor::MIN..Anchor::MAX]; let mut diff = DiffParser::new(diff_str); let mut current_file = None; let mut edits: Vec<(std::ops::Range, Arc)> = vec![]; @@ -115,7 +115,7 @@ pub async fn apply_diff( edits.extend(resolve_hunk_edits_in_buffer( hunk, buffer, - ranges.as_slice(), + &[Anchor::min_max_range_for_buffer(buffer.remote_id())], status, )?); anyhow::Ok(()) @@ -190,209 +190,6 @@ pub async fn refresh_worktree_entries( Ok(()) } -pub fn strip_diff_path_prefix<'a>(diff: &'a str, prefix: &str) -> Cow<'a, str> { - if prefix.is_empty() { - return Cow::Borrowed(diff); - } - - let prefix_with_slash = format!("{}/", prefix); - let mut needs_rewrite = false; - - for line in diff.lines() { - match DiffLine::parse(line) { - DiffLine::OldPath { path } | DiffLine::NewPath { path } => { - if path.starts_with(&prefix_with_slash) { - needs_rewrite = true; - break; - } - } - _ => {} - } - } - - if !needs_rewrite { - return Cow::Borrowed(diff); - } - - let mut result = String::with_capacity(diff.len()); - for line in diff.lines() { - match DiffLine::parse(line) { - DiffLine::OldPath { path } => { - let stripped = path - .strip_prefix(&prefix_with_slash) - .unwrap_or(path.as_ref()); - result.push_str(&format!("--- a/{}\n", stripped)); - } 
- DiffLine::NewPath { path } => { - let stripped = path - .strip_prefix(&prefix_with_slash) - .unwrap_or(path.as_ref()); - result.push_str(&format!("+++ b/{}\n", stripped)); - } - _ => { - result.push_str(line); - result.push('\n'); - } - } - } - - Cow::Owned(result) -} -/// Strip unnecessary git metadata lines from a diff, keeping only the lines -/// needed for patch application: path headers (--- and +++), hunk headers (@@), -/// and content lines (+, -, space). -pub fn strip_diff_metadata(diff: &str) -> String { - let mut result = String::new(); - - for line in diff.lines() { - let dominated = DiffLine::parse(line); - match dominated { - // Keep path headers, hunk headers, and content lines - DiffLine::OldPath { .. } - | DiffLine::NewPath { .. } - | DiffLine::HunkHeader(_) - | DiffLine::Context(_) - | DiffLine::Deletion(_) - | DiffLine::Addition(_) - | DiffLine::NoNewlineAtEOF => { - result.push_str(line); - result.push('\n'); - } - // Skip garbage lines (diff --git, index, etc.) - DiffLine::Garbage(_) => {} - } - } - - result -} - -/// Find all byte offsets where `hunk.context` occurs as a substring of `text`. -/// -/// If no exact matches are found and the context ends with `'\n'` but `text` -/// does not, retries without the trailing newline, accepting only a match at -/// the very end of `text`. When this fallback fires, the hunk's context is -/// trimmed and its edit ranges are clamped so that downstream code doesn't -/// index past the end of the matched region. This handles diffs that are -/// missing a `\ No newline at end of file` marker: the parser always appends -/// `'\n'` via `writeln!`, so the context can have a trailing newline that -/// doesn't exist in the source text. 
-fn find_context_candidates(text: &str, hunk: &mut Hunk) -> Vec { - let candidates: Vec = text - .match_indices(&hunk.context) - .map(|(offset, _)| offset) - .collect(); - - if !candidates.is_empty() { - return candidates; - } - - if hunk.context.ends_with('\n') && !hunk.context.is_empty() { - let old_len = hunk.context.len(); - hunk.context.pop(); - let new_len = hunk.context.len(); - - if !hunk.context.is_empty() { - let candidates: Vec = text - .match_indices(&hunk.context) - .filter(|(offset, _)| offset + new_len == text.len()) - .map(|(offset, _)| offset) - .collect(); - - if !candidates.is_empty() { - for edit in &mut hunk.edits { - let touched_phantom = edit.range.end > new_len; - edit.range.start = edit.range.start.min(new_len); - edit.range.end = edit.range.end.min(new_len); - if touched_phantom { - // The replacement text was also written with a - // trailing '\n' that corresponds to the phantom - // newline we just removed from the context. - if edit.text.ends_with('\n') { - edit.text.pop(); - } - } - } - return candidates; - } - - // Restore if fallback didn't help either. - hunk.context.push('\n'); - debug_assert_eq!(hunk.context.len(), old_len); - } else { - hunk.context.push('\n'); - } - } - - Vec::new() -} - -/// Given multiple candidate offsets where context matches, use line numbers to disambiguate. -/// Returns the offset that matches the expected line, or None if no match or no line number available. 
-fn disambiguate_by_line_number( - candidates: &[usize], - expected_line: Option, - offset_to_line: &dyn Fn(usize) -> u32, -) -> Option { - match candidates.len() { - 0 => None, - 1 => Some(candidates[0]), - _ => { - let expected = expected_line?; - candidates - .iter() - .copied() - .find(|&offset| offset_to_line(offset) == expected) - } - } -} - -pub fn apply_diff_to_string(diff_str: &str, text: &str) -> Result { - apply_diff_to_string_with_hunk_offset(diff_str, text).map(|(text, _)| text) -} - -/// Applies a diff to a string and returns the result along with the offset where -/// the first hunk's context matched in the original text. This offset can be used -/// to adjust cursor positions that are relative to the hunk's content. -pub fn apply_diff_to_string_with_hunk_offset( - diff_str: &str, - text: &str, -) -> Result<(String, Option)> { - let mut diff = DiffParser::new(diff_str); - - let mut text = text.to_string(); - let mut first_hunk_offset = None; - - while let Some(event) = diff.next().context("Failed to parse diff")? { - match event { - DiffEvent::Hunk { - mut hunk, - path: _, - status: _, - } => { - let candidates = find_context_candidates(&text, &mut hunk); - - let hunk_offset = - disambiguate_by_line_number(&candidates, hunk.start_line, &|offset| { - text[..offset].matches('\n').count() as u32 - }) - .ok_or_else(|| anyhow!("couldn't resolve hunk"))?; - - if first_hunk_offset.is_none() { - first_hunk_offset = Some(hunk_offset); - } - - for edit in hunk.edits.iter().rev() { - let range = (hunk_offset + edit.range.start)..(hunk_offset + edit.range.end); - text.replace_range(range, &edit.text); - } - } - DiffEvent::FileEnd { .. } => {} - } - } - - Ok((text, first_hunk_offset)) -} - /// Returns the individual edits that would be applied by a diff to the given content. /// Each edit is a tuple of (byte_range_in_content, replacement_text). /// Uses sub-line diffing to find the precise character positions of changes. 
@@ -441,227 +238,6 @@ pub fn edits_for_diff(content: &str, diff_str: &str) -> Result Ok(result) } -struct PatchFile<'a> { - old_path: Cow<'a, str>, - new_path: Cow<'a, str>, -} - -struct DiffParser<'a> { - current_file: Option>, - current_line: Option<(&'a str, DiffLine<'a>)>, - hunk: Hunk, - diff: std::str::Lines<'a>, - pending_start_line: Option, - processed_no_newline: bool, - last_diff_op: LastDiffOp, -} - -#[derive(Clone, Copy, Default)] -enum LastDiffOp { - #[default] - None, - Context, - Deletion, - Addition, -} - -#[derive(Debug, PartialEq)] -enum DiffEvent<'a> { - Hunk { - path: Cow<'a, str>, - hunk: Hunk, - status: FileStatus, - }, - FileEnd { - renamed_to: Option>, - }, -} - -#[derive(Debug, Clone, Copy, PartialEq)] -enum FileStatus { - Created, - Modified, - Deleted, -} - -#[derive(Debug, Default, PartialEq)] -struct Hunk { - context: String, - edits: Vec, - start_line: Option, -} - -impl Hunk { - fn is_empty(&self) -> bool { - self.context.is_empty() && self.edits.is_empty() - } -} - -#[derive(Debug, PartialEq)] -struct Edit { - range: Range, - text: String, -} - -impl<'a> DiffParser<'a> { - fn new(diff: &'a str) -> Self { - let mut diff = diff.lines(); - let current_line = diff.next().map(|line| (line, DiffLine::parse(line))); - DiffParser { - current_file: None, - hunk: Hunk::default(), - current_line, - diff, - pending_start_line: None, - processed_no_newline: false, - last_diff_op: LastDiffOp::None, - } - } - - fn next(&mut self) -> Result>> { - loop { - let (hunk_done, file_done) = match self.current_line.as_ref().map(|e| &e.1) { - Some(DiffLine::OldPath { .. 
}) | Some(DiffLine::Garbage(_)) | None => (true, true), - Some(DiffLine::HunkHeader(_)) => (true, false), - _ => (false, false), - }; - - if hunk_done { - if let Some(file) = &self.current_file - && !self.hunk.is_empty() - { - let status = if file.old_path == "/dev/null" { - FileStatus::Created - } else if file.new_path == "/dev/null" { - FileStatus::Deleted - } else { - FileStatus::Modified - }; - let path = if status == FileStatus::Created { - file.new_path.clone() - } else { - file.old_path.clone() - }; - let mut hunk = mem::take(&mut self.hunk); - hunk.start_line = self.pending_start_line.take(); - self.processed_no_newline = false; - self.last_diff_op = LastDiffOp::None; - return Ok(Some(DiffEvent::Hunk { path, hunk, status })); - } - } - - if file_done { - if let Some(PatchFile { old_path, new_path }) = self.current_file.take() { - return Ok(Some(DiffEvent::FileEnd { - renamed_to: if old_path != new_path && old_path != "/dev/null" { - Some(new_path) - } else { - None - }, - })); - } - } - - let Some((line, parsed_line)) = self.current_line.take() else { - break; - }; - - util::maybe!({ - match parsed_line { - DiffLine::OldPath { path } => { - self.current_file = Some(PatchFile { - old_path: path, - new_path: "".into(), - }); - } - DiffLine::NewPath { path } => { - if let Some(current_file) = &mut self.current_file { - current_file.new_path = path - } - } - DiffLine::HunkHeader(location) => { - if let Some(loc) = location { - self.pending_start_line = Some(loc.start_line_old); - } - } - DiffLine::Context(ctx) => { - if self.current_file.is_some() { - writeln!(&mut self.hunk.context, "{ctx}")?; - self.last_diff_op = LastDiffOp::Context; - } - } - DiffLine::Deletion(del) => { - if self.current_file.is_some() { - let range = self.hunk.context.len() - ..self.hunk.context.len() + del.len() + '\n'.len_utf8(); - if let Some(last_edit) = self.hunk.edits.last_mut() - && last_edit.range.end == range.start - { - last_edit.range.end = range.end; - } else { - 
self.hunk.edits.push(Edit { - range, - text: String::new(), - }); - } - writeln!(&mut self.hunk.context, "{del}")?; - self.last_diff_op = LastDiffOp::Deletion; - } - } - DiffLine::Addition(add) => { - if self.current_file.is_some() { - let range = self.hunk.context.len()..self.hunk.context.len(); - if let Some(last_edit) = self.hunk.edits.last_mut() - && last_edit.range.end == range.start - { - writeln!(&mut last_edit.text, "{add}").unwrap(); - } else { - self.hunk.edits.push(Edit { - range, - text: format!("{add}\n"), - }); - } - self.last_diff_op = LastDiffOp::Addition; - } - } - DiffLine::NoNewlineAtEOF => { - if !self.processed_no_newline { - self.processed_no_newline = true; - match self.last_diff_op { - LastDiffOp::Addition => { - // Remove trailing newline from the last addition - if let Some(last_edit) = self.hunk.edits.last_mut() { - last_edit.text.pop(); - } - } - LastDiffOp::Deletion => { - // Remove trailing newline from context (which includes the deletion) - self.hunk.context.pop(); - if let Some(last_edit) = self.hunk.edits.last_mut() { - last_edit.range.end -= 1; - } - } - LastDiffOp::Context | LastDiffOp::None => { - // Remove trailing newline from context - self.hunk.context.pop(); - } - } - } - } - DiffLine::Garbage(_) => {} - } - - anyhow::Ok(()) - }) - .with_context(|| format!("on line:\n\n```\n{}```", line))?; - - self.current_line = self.diff.next().map(|line| (line, DiffLine::parse(line))); - } - - anyhow::Ok(None) - } -} - fn resolve_hunk_edits_in_buffer( mut hunk: Hunk, buffer: &TextBufferSnapshot, @@ -714,144 +290,6 @@ fn resolve_hunk_edits_in_buffer( Ok(iter) } -#[derive(Debug, PartialEq)] -pub enum DiffLine<'a> { - OldPath { path: Cow<'a, str> }, - NewPath { path: Cow<'a, str> }, - HunkHeader(Option), - Context(&'a str), - Deletion(&'a str), - Addition(&'a str), - NoNewlineAtEOF, - Garbage(&'a str), -} - -#[derive(Debug, PartialEq)] -pub struct HunkLocation { - pub start_line_old: u32, - count_old: u32, - pub start_line_new: u32, - 
count_new: u32, -} - -impl<'a> DiffLine<'a> { - pub fn parse(line: &'a str) -> Self { - Self::try_parse(line).unwrap_or(Self::Garbage(line)) - } - - fn try_parse(line: &'a str) -> Option { - if line.starts_with("\\ No newline") { - return Some(Self::NoNewlineAtEOF); - } - if let Some(header) = line.strip_prefix("---").and_then(eat_required_whitespace) { - let path = parse_header_path("a/", header); - Some(Self::OldPath { path }) - } else if let Some(header) = line.strip_prefix("+++").and_then(eat_required_whitespace) { - Some(Self::NewPath { - path: parse_header_path("b/", header), - }) - } else if let Some(header) = line.strip_prefix("@@").and_then(eat_required_whitespace) { - if header.starts_with("...") { - return Some(Self::HunkHeader(None)); - } - - let mut tokens = header.split_whitespace(); - let old_range = tokens.next()?.strip_prefix('-')?; - let new_range = tokens.next()?.strip_prefix('+')?; - - let (start_line_old, count_old) = old_range.split_once(',').unwrap_or((old_range, "1")); - let (start_line_new, count_new) = new_range.split_once(',').unwrap_or((new_range, "1")); - - Some(Self::HunkHeader(Some(HunkLocation { - start_line_old: start_line_old.parse::().ok()?.saturating_sub(1), - count_old: count_old.parse().ok()?, - start_line_new: start_line_new.parse::().ok()?.saturating_sub(1), - count_new: count_new.parse().ok()?, - }))) - } else if let Some(deleted_header) = line.strip_prefix("-") { - Some(Self::Deletion(deleted_header)) - } else if line.is_empty() { - Some(Self::Context("")) - } else if let Some(context) = line.strip_prefix(" ") { - Some(Self::Context(context)) - } else { - Some(Self::Addition(line.strip_prefix("+")?)) - } - } -} - -impl<'a> Display for DiffLine<'a> { - fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { - match self { - DiffLine::OldPath { path } => write!(f, "--- {path}"), - DiffLine::NewPath { path } => write!(f, "+++ {path}"), - DiffLine::HunkHeader(Some(hunk_location)) => { - write!( - f, - "@@ -{},{} 
+{},{} @@", - hunk_location.start_line_old + 1, - hunk_location.count_old, - hunk_location.start_line_new + 1, - hunk_location.count_new - ) - } - DiffLine::HunkHeader(None) => write!(f, "@@ ... @@"), - DiffLine::Context(content) => write!(f, " {content}"), - DiffLine::Deletion(content) => write!(f, "-{content}"), - DiffLine::Addition(content) => write!(f, "+{content}"), - DiffLine::NoNewlineAtEOF => write!(f, "\\ No newline at end of file"), - DiffLine::Garbage(line) => write!(f, "{line}"), - } - } -} - -fn parse_header_path<'a>(strip_prefix: &'static str, header: &'a str) -> Cow<'a, str> { - if !header.contains(['"', '\\']) { - let path = header.split_ascii_whitespace().next().unwrap_or(header); - return Cow::Borrowed(path.strip_prefix(strip_prefix).unwrap_or(path)); - } - - let mut path = String::with_capacity(header.len()); - let mut in_quote = false; - let mut chars = header.chars().peekable(); - let mut strip_prefix = Some(strip_prefix); - - while let Some(char) = chars.next() { - if char == '"' { - in_quote = !in_quote; - } else if char == '\\' { - let Some(&next_char) = chars.peek() else { - break; - }; - chars.next(); - path.push(next_char); - } else if char.is_ascii_whitespace() && !in_quote { - break; - } else { - path.push(char); - } - - if let Some(prefix) = strip_prefix - && path == prefix - { - strip_prefix.take(); - path.clear(); - } - } - - Cow::Owned(path) -} - -fn eat_required_whitespace(header: &str) -> Option<&str> { - let trimmed = header.trim_ascii_start(); - - if trimmed.len() == header.len() { - None - } else { - Some(trimmed) - } -} - #[cfg(test)] mod tests { use super::*; @@ -863,387 +301,6 @@ mod tests { use settings::SettingsStore; use util::path; - #[test] - fn parse_lines_simple() { - let input = indoc! 
{" - diff --git a/text.txt b/text.txt - index 86c770d..a1fd855 100644 - --- a/file.txt - +++ b/file.txt - @@ -1,2 +1,3 @@ - context - -deleted - +inserted - garbage - - --- b/file.txt - +++ a/file.txt - "}; - - let lines = input.lines().map(DiffLine::parse).collect::>(); - - pretty_assertions::assert_eq!( - lines, - &[ - DiffLine::Garbage("diff --git a/text.txt b/text.txt"), - DiffLine::Garbage("index 86c770d..a1fd855 100644"), - DiffLine::OldPath { - path: "file.txt".into() - }, - DiffLine::NewPath { - path: "file.txt".into() - }, - DiffLine::HunkHeader(Some(HunkLocation { - start_line_old: 0, - count_old: 2, - start_line_new: 0, - count_new: 3 - })), - DiffLine::Context("context"), - DiffLine::Deletion("deleted"), - DiffLine::Addition("inserted"), - DiffLine::Garbage("garbage"), - DiffLine::Context(""), - DiffLine::OldPath { - path: "b/file.txt".into() - }, - DiffLine::NewPath { - path: "a/file.txt".into() - }, - ] - ); - } - - #[test] - fn file_header_extra_space() { - let options = ["--- file", "--- file", "---\tfile"]; - - for option in options { - pretty_assertions::assert_eq!( - DiffLine::parse(option), - DiffLine::OldPath { - path: "file".into() - }, - "{option}", - ); - } - } - - #[test] - fn hunk_header_extra_space() { - let options = [ - "@@ -1,2 +1,3 @@", - "@@ -1,2 +1,3 @@", - "@@\t-1,2\t+1,3\t@@", - "@@ -1,2 +1,3 @@", - "@@ -1,2 +1,3 @@", - "@@ -1,2 +1,3 @@", - "@@ -1,2 +1,3 @@ garbage", - ]; - - for option in options { - pretty_assertions::assert_eq!( - DiffLine::parse(option), - DiffLine::HunkHeader(Some(HunkLocation { - start_line_old: 0, - count_old: 2, - start_line_new: 0, - count_new: 3 - })), - "{option}", - ); - } - } - - #[test] - fn hunk_header_without_location() { - pretty_assertions::assert_eq!(DiffLine::parse("@@ ... 
@@"), DiffLine::HunkHeader(None)); - } - - #[test] - fn test_parse_path() { - assert_eq!(parse_header_path("a/", "foo.txt"), "foo.txt"); - assert_eq!( - parse_header_path("a/", "foo/bar/baz.txt"), - "foo/bar/baz.txt" - ); - assert_eq!(parse_header_path("a/", "a/foo.txt"), "foo.txt"); - assert_eq!( - parse_header_path("a/", "a/foo/bar/baz.txt"), - "foo/bar/baz.txt" - ); - - // Extra - assert_eq!( - parse_header_path("a/", "a/foo/bar/baz.txt 2025"), - "foo/bar/baz.txt" - ); - assert_eq!( - parse_header_path("a/", "a/foo/bar/baz.txt\t2025"), - "foo/bar/baz.txt" - ); - assert_eq!( - parse_header_path("a/", "a/foo/bar/baz.txt \""), - "foo/bar/baz.txt" - ); - - // Quoted - assert_eq!( - parse_header_path("a/", "a/foo/bar/\"baz quox.txt\""), - "foo/bar/baz quox.txt" - ); - assert_eq!( - parse_header_path("a/", "\"a/foo/bar/baz quox.txt\""), - "foo/bar/baz quox.txt" - ); - assert_eq!( - parse_header_path("a/", "\"foo/bar/baz quox.txt\""), - "foo/bar/baz quox.txt" - ); - assert_eq!(parse_header_path("a/", "\"whatever 🤷\""), "whatever 🤷"); - assert_eq!( - parse_header_path("a/", "\"foo/bar/baz quox.txt\" 2025"), - "foo/bar/baz quox.txt" - ); - // unescaped quotes are dropped - assert_eq!(parse_header_path("a/", "foo/\"bar\""), "foo/bar"); - - // Escaped - assert_eq!( - parse_header_path("a/", "\"foo/\\\"bar\\\"/baz.txt\""), - "foo/\"bar\"/baz.txt" - ); - assert_eq!( - parse_header_path("a/", "\"C:\\\\Projects\\\\My App\\\\old file.txt\""), - "C:\\Projects\\My App\\old file.txt" - ); - } - - #[test] - fn test_parse_diff_with_leading_and_trailing_garbage() { - let diff = indoc! {" - I need to make some changes. - - I'll change the following things: - - one - - two - - three - - ``` - --- a/file.txt - +++ b/file.txt - one - +AND - two - ``` - - Summary of what I did: - - one - - two - - three - - That's about it. 
- "}; - - let mut events = Vec::new(); - let mut parser = DiffParser::new(diff); - while let Some(event) = parser.next().unwrap() { - events.push(event); - } - - assert_eq!( - events, - &[ - DiffEvent::Hunk { - path: "file.txt".into(), - hunk: Hunk { - context: "one\ntwo\n".into(), - edits: vec![Edit { - range: 4..4, - text: "AND\n".into() - }], - start_line: None, - }, - status: FileStatus::Modified, - }, - DiffEvent::FileEnd { renamed_to: None } - ], - ) - } - - #[test] - fn test_no_newline_at_eof() { - let diff = indoc! {" - --- a/file.py - +++ b/file.py - @@ -55,7 +55,3 @@ class CustomDataset(Dataset): - torch.set_rng_state(state) - mask = self.transform(mask) - - - if self.mode == 'Training': - - return (img, mask, name) - - else: - - return (img, mask, name) - \\ No newline at end of file - "}; - - let mut events = Vec::new(); - let mut parser = DiffParser::new(diff); - while let Some(event) = parser.next().unwrap() { - events.push(event); - } - - assert_eq!( - events, - &[ - DiffEvent::Hunk { - path: "file.py".into(), - hunk: Hunk { - context: concat!( - " torch.set_rng_state(state)\n", - " mask = self.transform(mask)\n", - "\n", - " if self.mode == 'Training':\n", - " return (img, mask, name)\n", - " else:\n", - " return (img, mask, name)", - ) - .into(), - edits: vec![Edit { - range: 80..203, - text: "".into() - }], - start_line: Some(54), // @@ -55,7 -> line 54 (0-indexed) - }, - status: FileStatus::Modified, - }, - DiffEvent::FileEnd { renamed_to: None } - ], - ); - } - - #[test] - fn test_no_newline_at_eof_addition() { - let diff = indoc! 
{" - --- a/file.txt - +++ b/file.txt - @@ -1,2 +1,3 @@ - context - -deleted - +added line - \\ No newline at end of file - "}; - - let mut events = Vec::new(); - let mut parser = DiffParser::new(diff); - while let Some(event) = parser.next().unwrap() { - events.push(event); - } - - assert_eq!( - events, - &[ - DiffEvent::Hunk { - path: "file.txt".into(), - hunk: Hunk { - context: "context\ndeleted\n".into(), - edits: vec![Edit { - range: 8..16, - text: "added line".into() - }], - start_line: Some(0), // @@ -1,2 -> line 0 (0-indexed) - }, - status: FileStatus::Modified, - }, - DiffEvent::FileEnd { renamed_to: None } - ], - ); - } - - #[test] - fn test_double_no_newline_at_eof() { - // Two consecutive "no newline" markers - the second should be ignored - let diff = indoc! {" - --- a/file.txt - +++ b/file.txt - @@ -1,3 +1,3 @@ - line1 - -old - +new - line3 - \\ No newline at end of file - \\ No newline at end of file - "}; - - let mut events = Vec::new(); - let mut parser = DiffParser::new(diff); - while let Some(event) = parser.next().unwrap() { - events.push(event); - } - - assert_eq!( - events, - &[ - DiffEvent::Hunk { - path: "file.txt".into(), - hunk: Hunk { - context: "line1\nold\nline3".into(), // Only one newline removed - edits: vec![Edit { - range: 6..10, // "old\n" is 4 bytes - text: "new\n".into() - }], - start_line: Some(0), - }, - status: FileStatus::Modified, - }, - DiffEvent::FileEnd { renamed_to: None } - ], - ); - } - - #[test] - fn test_no_newline_after_context_not_addition() { - // "No newline" after context lines should remove newline from context, - // not from an earlier addition - let diff = indoc! 
{" - --- a/file.txt - +++ b/file.txt - @@ -1,4 +1,4 @@ - line1 - -old - +new - line3 - line4 - \\ No newline at end of file - "}; - - let mut events = Vec::new(); - let mut parser = DiffParser::new(diff); - while let Some(event) = parser.next().unwrap() { - events.push(event); - } - - assert_eq!( - events, - &[ - DiffEvent::Hunk { - path: "file.txt".into(), - hunk: Hunk { - // newline removed from line4 (context), not from "new" (addition) - context: "line1\nold\nline3\nline4".into(), - edits: vec![Edit { - range: 6..10, // "old\n" is 4 bytes - text: "new\n".into() // Still has newline - }], - start_line: Some(0), - }, - status: FileStatus::Modified, - }, - DiffEvent::FileEnd { renamed_to: None } - ], - ); - } - #[test] fn test_line_number_disambiguation() { // Test that line numbers from hunk headers are used to disambiguate @@ -1536,197 +593,6 @@ mod tests { assert_eq!(cursor_column, " let x = ".len()); } - #[test] - fn test_strip_diff_metadata() { - let diff_with_metadata = indoc! {r#" - diff --git a/file.txt b/file.txt - index 1234567..abcdefg 100644 - --- a/file.txt - +++ b/file.txt - @@ -1,3 +1,4 @@ - context line - -removed line - +added line - more context - "#}; - - let stripped = strip_diff_metadata(diff_with_metadata); - - assert_eq!( - stripped, - indoc! {r#" - --- a/file.txt - +++ b/file.txt - @@ -1,3 +1,4 @@ - context line - -removed line - +added line - more context - "#} - ); - } - - #[test] - fn test_apply_diff_to_string_no_trailing_newline() { - // Text without trailing newline; diff generated without - // `\ No newline at end of file` marker. - let text = "line1\nline2\nline3"; - let diff = indoc! 
{" - --- a/file.txt - +++ b/file.txt - @@ -1,3 +1,3 @@ - line1 - -line2 - +replaced - line3 - "}; - - let result = apply_diff_to_string(diff, text).unwrap(); - assert_eq!(result, "line1\nreplaced\nline3"); - } - - #[test] - fn test_apply_diff_to_string_trailing_newline_present() { - // When text has a trailing newline, exact matching still works and - // the fallback is never needed. - let text = "line1\nline2\nline3\n"; - let diff = indoc! {" - --- a/file.txt - +++ b/file.txt - @@ -1,3 +1,3 @@ - line1 - -line2 - +replaced - line3 - "}; - - let result = apply_diff_to_string(diff, text).unwrap(); - assert_eq!(result, "line1\nreplaced\nline3\n"); - } - - #[test] - fn test_apply_diff_to_string_deletion_at_end_no_trailing_newline() { - // Deletion of the last line when text has no trailing newline. - // The edit range must be clamped so it doesn't index past the - // end of the text. - let text = "line1\nline2\nline3"; - let diff = indoc! {" - --- a/file.txt - +++ b/file.txt - @@ -1,3 +1,2 @@ - line1 - line2 - -line3 - "}; - - let result = apply_diff_to_string(diff, text).unwrap(); - assert_eq!(result, "line1\nline2\n"); - } - - #[test] - fn test_apply_diff_to_string_replace_last_line_no_trailing_newline() { - // Replace the last line when text has no trailing newline. - let text = "aaa\nbbb\nccc"; - let diff = indoc! {" - --- a/file.txt - +++ b/file.txt - @@ -1,3 +1,3 @@ - aaa - bbb - -ccc - +ddd - "}; - - let result = apply_diff_to_string(diff, text).unwrap(); - assert_eq!(result, "aaa\nbbb\nddd"); - } - - #[test] - fn test_apply_diff_to_string_multibyte_no_trailing_newline() { - // Multi-byte UTF-8 characters near the end; ensures char boundary - // safety when the fallback clamps edit ranges. - let text = "hello\n세계"; - let diff = indoc! 
{" - --- a/file.txt - +++ b/file.txt - @@ -1,2 +1,2 @@ - hello - -세계 - +world - "}; - - let result = apply_diff_to_string(diff, text).unwrap(); - assert_eq!(result, "hello\nworld"); - } - - #[test] - fn test_find_context_candidates_no_false_positive_mid_text() { - // The stripped fallback must only match at the end of text, not in - // the middle where a real newline exists. - let text = "aaa\nbbb\nccc\n"; - let mut hunk = Hunk { - context: "bbb\n".into(), - edits: vec![], - start_line: None, - }; - - let candidates = find_context_candidates(text, &mut hunk); - // Exact match at offset 4 — the fallback is not used. - assert_eq!(candidates, vec![4]); - } - - #[test] - fn test_find_context_candidates_fallback_at_end() { - let text = "aaa\nbbb"; - let mut hunk = Hunk { - context: "bbb\n".into(), - edits: vec![], - start_line: None, - }; - - let candidates = find_context_candidates(text, &mut hunk); - assert_eq!(candidates, vec![4]); - // Context should be stripped. - assert_eq!(hunk.context, "bbb"); - } - - #[test] - fn test_find_context_candidates_no_fallback_mid_text() { - // "bbb" appears mid-text followed by a newline, so the exact - // match succeeds. Verify the stripped fallback doesn't produce a - // second, spurious candidate. - let text = "aaa\nbbb\nccc"; - let mut hunk = Hunk { - context: "bbb\nccc\n".into(), - edits: vec![], - start_line: None, - }; - - let candidates = find_context_candidates(text, &mut hunk); - // No exact match (text ends without newline after "ccc"), but the - // stripped context "bbb\nccc" matches at offset 4, which is the end. 
- assert_eq!(candidates, vec![4]); - assert_eq!(hunk.context, "bbb\nccc"); - } - - #[test] - fn test_find_context_candidates_clamps_edit_ranges() { - let text = "aaa\nbbb"; - let mut hunk = Hunk { - context: "aaa\nbbb\n".into(), - edits: vec![Edit { - range: 4..8, // "bbb\n" — end points at the trailing \n - text: "ccc\n".into(), - }], - start_line: None, - }; - - let candidates = find_context_candidates(text, &mut hunk); - assert_eq!(candidates, vec![0]); - // Edit range end should be clamped to 7 (new context length). - assert_eq!(hunk.edits[0].range, 4..7); - } - #[test] fn test_edits_for_diff_no_trailing_newline() { let content = "foo\nbar\nbaz"; diff --git a/crates/edit_prediction/src/zeta.rs b/crates/edit_prediction/src/zeta.rs index fdfe3ebcf06c8319f5ce00066fa279d79eda7eea..b4556e58b9247624e2d4caeddb5614ff5000d854 100644 --- a/crates/edit_prediction/src/zeta.rs +++ b/crates/edit_prediction/src/zeta.rs @@ -24,8 +24,9 @@ use zeta_prompt::{ParsedOutput, ZetaPromptInput}; use std::{env, ops::Range, path::Path, sync::Arc}; use zeta_prompt::{ - CURSOR_MARKER, ZetaFormat, format_zeta_prompt, get_prefill, parse_zeta2_model_output, - prompt_input_contains_special_tokens, stop_tokens_for_format, + ZetaFormat, format_zeta_prompt, get_prefill, parse_zeta2_model_output, + parsed_output_from_editable_region, prompt_input_contains_special_tokens, + stop_tokens_for_format, zeta1::{self, EDITABLE_REGION_END_MARKER}, }; @@ -181,6 +182,7 @@ pub fn request_prediction_with_zeta( let parsed_output = output_text.map(|text| ParsedOutput { new_editable_region: text, range_in_excerpt: editable_range_in_excerpt, + cursor_offset_in_new_editable_region: None, }); (request_id, parsed_output, None, None) @@ -283,10 +285,10 @@ pub fn request_prediction_with_zeta( let request_id = EditPredictionId(response.request_id.into()); let output_text = Some(response.output).filter(|s| !s.is_empty()); let model_version = response.model_version; - let parsed_output = ParsedOutput { - 
new_editable_region: output_text.unwrap_or_default(), - range_in_excerpt: response.editable_range, - }; + let parsed_output = parsed_output_from_editable_region( + response.editable_range, + output_text.unwrap_or_default(), + ); Some((request_id, Some(parsed_output), model_version, usage)) }) @@ -299,6 +301,7 @@ pub fn request_prediction_with_zeta( let Some(ParsedOutput { new_editable_region: mut output_text, range_in_excerpt: editable_range_in_excerpt, + cursor_offset_in_new_editable_region: cursor_offset_in_output, }) = output else { return Ok((Some((request_id, None)), None)); @@ -312,13 +315,6 @@ pub fn request_prediction_with_zeta( .text_for_range(editable_range_in_buffer.clone()) .collect::(); - // Client-side cursor marker processing (applies to both raw and v3 responses) - let cursor_offset_in_output = output_text.find(CURSOR_MARKER); - if let Some(offset) = cursor_offset_in_output { - log::trace!("Stripping out {CURSOR_MARKER} from response at offset {offset}"); - output_text.replace_range(offset..offset + CURSOR_MARKER.len(), ""); - } - if let Some(debug_tx) = &debug_tx { debug_tx .unbounded_send(DebugEvent::EditPredictionFinished( diff --git a/crates/edit_prediction_cli/Cargo.toml b/crates/edit_prediction_cli/Cargo.toml index 83a78641bc2b14a9ea92cc0eae674135444ac691..323ee3de41902b2140f95da22b0e37fb98d31fd5 100644 --- a/crates/edit_prediction_cli/Cargo.toml +++ b/crates/edit_prediction_cli/Cargo.toml @@ -8,6 +8,9 @@ license = "GPL-3.0-or-later" [lints] workspace = true +[lib] +path = "src/lib.rs" + [[bin]] name = "ep" path = "src/main.rs" @@ -80,9 +83,14 @@ dynamic_prompts = [] ignored = ["wasmtime"] [dev-dependencies] +criterion.workspace = true gpui = { workspace = true, features = ["test-support"] } indoc.workspace = true pretty_assertions.workspace = true project = { workspace = true, features = ["test-support"] } tempfile.workspace = true workspace = { workspace = true, features = ["test-support"] } + +[[bench]] +name = "kept_rate" +harness = false 
diff --git a/crates/edit_prediction_cli/benches/kept_rate.rs b/crates/edit_prediction_cli/benches/kept_rate.rs new file mode 100644 index 0000000000000000000000000000000000000000..eccbb42dc0591ee15a0b942a4c326d0e4f2123ee --- /dev/null +++ b/crates/edit_prediction_cli/benches/kept_rate.rs @@ -0,0 +1,128 @@ +use criterion::{BenchmarkId, Criterion, black_box, criterion_group, criterion_main}; +use edit_prediction_cli::kept_rate::compute_kept_rate; + +fn repeated_function_lines(line_count: usize) -> String { + let mut text = String::with_capacity(line_count * 32); + for index in 0..line_count { + text.push_str("fn helper_"); + text.push_str(&(index % 16).to_string()); + text.push_str("() { value += old_name + 1; }\n"); + } + text +} + +fn localized_rename_inputs(line_count: usize) -> (String, String, String) { + let base = repeated_function_lines(line_count); + let mut predicted = base.clone(); + let mut final_text = base.clone(); + + let needle = "value += old_name + 1;"; + let prediction = "value += very_long_predicted_name + 1;"; + let accepted = "value += new_name + 1;"; + + let offset = base + .rfind(needle) + .expect("expected needle in synthetic input"); + let end = offset + needle.len(); + + predicted.replace_range(offset..end, prediction); + final_text.replace_range(offset..end, accepted); + + (base, predicted, final_text) +} + +fn identical_new_content_inputs(line_count: usize) -> (String, String, String) { + let predicted = repeated_function_lines(line_count); + (String::new(), predicted.clone(), predicted) +} + +fn repetitive_token_inputs(token_repetitions: usize) -> (String, String, String) { + let repeated_old = "foo + foo + foo + foo + foo\n".repeat(token_repetitions); + let repeated_predicted = "foo + foo + prediction_token + foo + foo\n".repeat(token_repetitions); + let repeated_final = "foo + foo + kept_token + foo + foo\n".repeat(token_repetitions); + (repeated_old, repeated_predicted, repeated_final) +} + +fn kept_rate_benchmark(c: &mut Criterion) { 
+ let mut no_change_group = c.benchmark_group("kept_rate/no_change"); + for line_count in [128usize, 512, 2048] { + let text = repeated_function_lines(line_count); + no_change_group.bench_with_input( + BenchmarkId::new("lines", line_count), + &text, + |bench, text| { + bench.iter(|| { + black_box(compute_kept_rate( + black_box(text), + black_box(text), + black_box(text), + )); + }); + }, + ); + } + no_change_group.finish(); + + let mut localized_group = c.benchmark_group("kept_rate/localized_rename"); + for line_count in [128usize, 512, 2048] { + let inputs = localized_rename_inputs(line_count); + localized_group.bench_with_input( + BenchmarkId::new("lines", line_count), + &inputs, + |bench, inputs| { + let (base, predicted, final_text) = inputs; + bench.iter(|| { + black_box(compute_kept_rate( + black_box(base), + black_box(predicted), + black_box(final_text), + )); + }); + }, + ); + } + localized_group.finish(); + + let mut addition_group = c.benchmark_group("kept_rate/identical_addition"); + for line_count in [128usize, 512, 2048] { + let inputs = identical_new_content_inputs(line_count); + addition_group.bench_with_input( + BenchmarkId::new("lines", line_count), + &inputs, + |bench, inputs| { + let (base, predicted, final_text) = inputs; + bench.iter(|| { + black_box(compute_kept_rate( + black_box(base), + black_box(predicted), + black_box(final_text), + )); + }); + }, + ); + } + addition_group.finish(); + + let mut repetitive_group = c.benchmark_group("kept_rate/repetitive_tokens"); + for token_repetitions in [64usize, 256, 1024] { + let inputs = repetitive_token_inputs(token_repetitions); + repetitive_group.bench_with_input( + BenchmarkId::new("repetitions", token_repetitions), + &inputs, + |bench, inputs| { + let (base, predicted, final_text) = inputs; + bench.iter(|| { + black_box(compute_kept_rate( + black_box(base), + black_box(predicted), + black_box(final_text), + )); + }); + }, + ); + } + repetitive_group.finish(); +} + +criterion_group!(benches, 
kept_rate_benchmark); +criterion_main!(benches); diff --git a/crates/edit_prediction_cli/src/example.rs b/crates/edit_prediction_cli/src/example.rs index 4827337d37a211056d04cf9ca13f8d49fb91c392..682671141d050836d25705b2732f11500f159209 100644 --- a/crates/edit_prediction_cli/src/example.rs +++ b/crates/edit_prediction_cli/src/example.rs @@ -184,6 +184,8 @@ pub struct ExampleScore { #[serde(default)] pub deleted_tokens: usize, #[serde(default, skip_serializing_if = "Option::is_none")] + pub kept_rate: Option, + #[serde(default, skip_serializing_if = "Option::is_none")] pub cumulative_logprob: Option, #[serde(default, skip_serializing_if = "Option::is_none")] pub avg_logprob: Option, diff --git a/crates/edit_prediction_cli/src/format_prompt.rs b/crates/edit_prediction_cli/src/format_prompt.rs index 2a1b49007bd19e721a6d95ebddda3758c86aaaef..24a6f1acd470fb8ee77e87d993079298f45b390c 100644 --- a/crates/edit_prediction_cli/src/format_prompt.rs +++ b/crates/edit_prediction_cli/src/format_prompt.rs @@ -6,11 +6,11 @@ use crate::{ retrieve_context::run_context_retrieval, }; use anyhow::{Context as _, Result, anyhow}; -use edit_prediction::udiff; use gpui::AsyncApp; use similar::DiffableStr; use std::ops::Range; use std::sync::Arc; +use zeta_prompt::udiff; use zeta_prompt::{ ZetaFormat, encode_patch_as_output_for_format, excerpt_range_for_format, format_zeta_prompt, multi_region, output_end_marker_for_format, resolve_cursor_region, diff --git a/crates/edit_prediction_cli/src/headless.rs b/crates/edit_prediction_cli/src/headless.rs index 3a204a7052f8a41d6e7c2c49860b62f588358644..48b7381020f48d868d9f6413ef343b30718e5be6 100644 --- a/crates/edit_prediction_cli/src/headless.rs +++ b/crates/edit_prediction_cli/src/headless.rs @@ -1,4 +1,4 @@ -use client::{Client, ProxySettings, UserStore}; +use client::{Client, ProxySettings, RefreshLlmTokenListener, UserStore}; use db::AppDatabase; use extension::ExtensionHostProxy; use fs::RealFs; @@ -109,7 +109,8 @@ pub fn init(cx: &mut App) 
-> EpAppState { debug_adapter_extension::init(extension_host_proxy.clone(), cx); language_extension::init(LspAccess::Noop, extension_host_proxy, languages.clone()); - language_model::init(user_store.clone(), client.clone(), cx); + language_model::init(cx); + RefreshLlmTokenListener::register(client.clone(), user_store.clone(), cx); language_models::init(user_store.clone(), client.clone(), cx); languages::init(languages.clone(), fs.clone(), node_runtime.clone(), cx); prompt_store::init(cx); diff --git a/crates/edit_prediction_cli/src/kept_rate.rs b/crates/edit_prediction_cli/src/kept_rate.rs new file mode 100644 index 0000000000000000000000000000000000000000..565597fd12b567e7f7f23be233b87ba2284a176f --- /dev/null +++ b/crates/edit_prediction_cli/src/kept_rate.rs @@ -0,0 +1,427 @@ +use crate::word_diff::tokenize; + +#[cfg(test)] +#[derive(Debug, Clone, Copy, PartialEq, Eq)] +pub enum TokenAnnotation { + Context, + Kept, + Discarded, +} + +#[allow(dead_code)] +#[derive(Debug, Clone)] +pub struct KeptRateResult { + pub predicted_new_chars: usize, + pub final_new_chars: usize, + pub kept_chars: usize, + pub discarded_chars: usize, + pub context_chars: usize, + pub kept_rate: f64, + #[cfg(test)] + pub token_annotations: Vec, +} + +fn dp_index(width: usize, row: usize, column: usize) -> usize { + row * width + column +} + +/// Return masks over `a` and `b` using one-sided LCS tie-breaking for each +/// side while sharing a single DP table construction. 
+fn lcs_keep_masks(a: &[&str], b: &[&str]) -> (Vec, Vec) { + if a.is_empty() || b.is_empty() { + return (vec![false; a.len()], vec![false; b.len()]); + } + + if a == b { + return (vec![true; a.len()], vec![true; b.len()]); + } + + let mut keep_a = vec![false; a.len()]; + let mut keep_b = vec![false; b.len()]; + + let prefix_len = a + .iter() + .zip(b.iter()) + .take_while(|(left, right)| left == right) + .count(); + let suffix_len = { + let max_suffix = (a.len() - prefix_len).min(b.len() - prefix_len); + let mut suffix_len = 0; + + while suffix_len < max_suffix { + let a_index = a.len() - 1 - suffix_len; + let b_index = b.len() - 1 - suffix_len; + if a[a_index] != b[b_index] { + break; + } + suffix_len += 1; + } + + suffix_len + }; + + for index in 0..prefix_len { + keep_a[index] = true; + keep_b[index] = true; + } + + for offset in 0..suffix_len { + let a_index = a.len() - suffix_len + offset; + let b_index = b.len() - suffix_len + offset; + keep_a[a_index] = true; + keep_b[b_index] = true; + } + + let a_mid = &a[prefix_len..a.len() - suffix_len]; + let b_mid = &b[prefix_len..b.len() - suffix_len]; + + if a_mid.is_empty() || b_mid.is_empty() { + return (keep_a, keep_b); + } + + let row_count = a_mid.len() + 1; + let column_count = b_mid.len() + 1; + let mut dp = vec![0u32; row_count * column_count]; + + for i in 1..row_count { + let token_a = a_mid[i - 1]; + for j in 1..column_count { + let index = dp_index(column_count, i, j); + if token_a == b_mid[j - 1] { + dp[index] = dp[dp_index(column_count, i - 1, j - 1)] + 1; + } else { + let up = dp[dp_index(column_count, i - 1, j)]; + let left = dp[dp_index(column_count, i, j - 1)]; + dp[index] = up.max(left); + } + } + } + + let mut i = a_mid.len(); + let mut j = b_mid.len(); + + while i > 0 && j > 0 { + if a_mid[i - 1] == b_mid[j - 1] { + keep_a[prefix_len + i - 1] = true; + i -= 1; + j -= 1; + } else { + let up = dp[dp_index(column_count, i - 1, j)]; + let left = dp[dp_index(column_count, i, j - 1)]; + if up >= left { 
+ i -= 1; + } else { + j -= 1; + } + } + } + + let mut i = a_mid.len(); + let mut j = b_mid.len(); + + while i > 0 && j > 0 { + if a_mid[i - 1] == b_mid[j - 1] { + keep_b[prefix_len + j - 1] = true; + i -= 1; + j -= 1; + } else { + let up = dp[dp_index(column_count, i - 1, j)]; + let left = dp[dp_index(column_count, i, j - 1)]; + if left >= up { + j -= 1; + } else { + i -= 1; + } + } + } + + (keep_a, keep_b) +} + +fn analyze_masked_tokens<'a>(tokens: &[&'a str], mask: &[bool]) -> (Vec<&'a str>, usize, usize) { + let mut unmasked_tokens = Vec::with_capacity(tokens.len()); + let mut unmasked_chars = 0; + let mut masked_chars = 0; + + for (&token, &is_masked) in tokens.iter().zip(mask.iter()) { + if is_masked { + masked_chars += token.len(); + } else { + unmasked_tokens.push(token); + unmasked_chars += token.len(); + } + } + + (unmasked_tokens, unmasked_chars, masked_chars) +} + +pub fn compute_kept_rate(base: &str, predicted: &str, final_text: &str) -> KeptRateResult { + if base == predicted && predicted == final_text { + let predicted_tokens = tokenize(predicted); + let context_chars = predicted_tokens.iter().map(|token| token.len()).sum(); + return KeptRateResult { + predicted_new_chars: 0, + final_new_chars: 0, + kept_chars: 0, + discarded_chars: 0, + context_chars, + kept_rate: 1.0, + #[cfg(test)] + token_annotations: vec![TokenAnnotation::Context; predicted_tokens.len()], + }; + } + + let base_tokens = tokenize(base); + let predicted_tokens = tokenize(predicted); + let final_tokens = tokenize(final_text); + + let (pred_base_mask, _) = lcs_keep_masks(&predicted_tokens, &base_tokens); + let (pred_final_mask, final_pred_mask) = lcs_keep_masks(&predicted_tokens, &final_tokens); + let context_mask: Vec = pred_base_mask + .iter() + .zip(pred_final_mask.iter()) + .map(|(&in_base, &in_final)| in_base && in_final) + .collect(); + + let (stripped_predicted, predicted_new_chars, context_chars) = + analyze_masked_tokens(&predicted_tokens, &context_mask); + + let 
(final_base_mask, _) = lcs_keep_masks(&final_tokens, &base_tokens); + let final_context_mask: Vec = final_base_mask + .iter() + .zip(final_pred_mask.iter()) + .map(|(&in_base, &in_predicted)| in_base && in_predicted) + .collect(); + + let (stripped_final, final_new_chars, _) = + analyze_masked_tokens(&final_tokens, &final_context_mask); + + let keep_mask = lcs_keep_masks(&stripped_predicted, &stripped_final).0; + + let kept_chars: usize = stripped_predicted + .iter() + .zip(keep_mask.iter()) + .filter_map(|(&token, &is_kept)| is_kept.then_some(token.len())) + .sum(); + + let discarded_chars = predicted_new_chars - kept_chars; + + let kept_rate = if predicted_new_chars == 0 { + if final_new_chars == 0 { 1.0 } else { 0.0 } + } else { + kept_chars as f64 / predicted_new_chars as f64 + }; + + #[cfg(test)] + let token_annotations = { + let mut token_annotations = Vec::with_capacity(predicted_tokens.len()); + let mut new_index = 0; + for (token_index, _token) in predicted_tokens.iter().enumerate() { + if context_mask[token_index] { + token_annotations.push(TokenAnnotation::Context); + } else { + let annotation = if keep_mask[new_index] { + TokenAnnotation::Kept + } else { + TokenAnnotation::Discarded + }; + #[cfg(test)] + token_annotations.push(annotation); + new_index += 1; + } + } + token_annotations + }; + + KeptRateResult { + predicted_new_chars, + final_new_chars, + kept_chars, + discarded_chars, + context_chars, + kept_rate, + #[cfg(test)] + token_annotations, + } +} + +#[cfg(test)] +mod test_kept_rate { + use super::*; + + #[test] + fn test_lcs_keep_masks() { + let (a_mask, b_mask) = lcs_keep_masks(&["a", "b", "c", "d", "e"], &["a", "c", "e"]); + assert_eq!(a_mask, vec![true, false, true, false, true]); + assert_eq!(b_mask, vec![true, true, true]); + + let (a_mask, b_mask) = lcs_keep_masks(&[], &["x"]); + assert!(a_mask.is_empty()); + assert_eq!(b_mask, vec![false]); + } + + #[test] + fn test_lcs_keep_masks_matches_historical_one_sided_masks() { + let a = ["x", 
"a", "x", "b"]; + let b = ["a", "x", "b", "x"]; + let (a_mask, b_mask) = lcs_keep_masks(&a, &b); + assert_eq!(a_mask, lcs_keep_masks(&a, &b).0); + assert_eq!(b_mask, lcs_keep_masks(&b, &a).0); + } + + #[test] + fn test_rate_extremes() { + let no_change = compute_kept_rate("foo bar", "foo bar", "foo bar"); + assert!((no_change.kept_rate - 1.0).abs() < 1e-6); + assert_eq!(no_change.predicted_new_chars, 0); + assert!( + no_change + .token_annotations + .iter() + .all(|&annotation| annotation == TokenAnnotation::Context) + ); + + let accepted = compute_kept_rate("old", "new", "new"); + assert!((accepted.kept_rate - 1.0).abs() < 1e-6); + + let discarded = compute_kept_rate("old", "old", "new"); + assert!((discarded.kept_rate - 0.0).abs() < 1e-6); + } + + #[test] + fn test_pure_addition() { + let kept = compute_kept_rate("", "brand new line\n", "brand new line\n"); + assert_eq!(kept.kept_chars, kept.predicted_new_chars); + assert!( + kept.token_annotations + .iter() + .all(|&annotation| annotation == TokenAnnotation::Kept) + ); + + let discarded = + compute_kept_rate("", "brand new line\n", "something completely different\n"); + assert!(discarded.kept_chars < discarded.predicted_new_chars); + } + + #[test] + fn test_decoy_when_base_excluded() { + let base = " decoy.when(mock_sync_hardware_api.sp()).then_return(SpeedStatus.IDLE)\n"; + let predicted = " decoy.when(mock_sync_module_hardware.speed_status).then_return(SpeedStatus.IDLE)\n"; + let final_text = " decoy.when(mock_sync_module_hardware.speed_status).then_return(SpeedStatus.IDLE)\n"; + let result = compute_kept_rate(base, predicted, final_text); + let expected_new = "mock_sync_module_hardware".len() + "speed_status".len(); + assert_eq!(result.predicted_new_chars, expected_new); + assert!((result.kept_rate - 1.0).abs() < 1e-6); + } + + #[test] + fn test_missing_deletion() { + let base = " fn select_next_edit(&mut self, _: &NextEdit, _: &mut Window, cx: &mut Context) {\n epr\n"; + let predicted = " fn 
select_next_edit(&mut self, _: &NextEdit, _: &mut Window, cx: &mut Context) {\n epr\neprintln!(\"\");\n"; + let final_text = " fn select_next_edit(&mut self, _: &NextEdit, _: &mut Window, cx: &mut Context) {\n eprintln!(\"\");\n"; + let result = compute_kept_rate(base, predicted, final_text); + assert!( + result.kept_rate < 0.85, + "expected kept_rate < 0.85, got {}", + result.kept_rate + ); + assert!(result.discarded_chars > 0); + } + + #[test] + fn test_empty_prediction() { + let result = compute_kept_rate("old line\n", "", "new line\n"); + assert!((result.kept_rate - 0.0).abs() < 1e-6); + } + + #[test] + fn test_partial_kept() { + let result = compute_kept_rate("old\n", "alpha\nbeta\ngamma\n", "alpha\ngamma\n"); + assert!(result.kept_chars > 0); + assert!(result.discarded_chars > 0); + assert!(result.kept_rate > 0.0 && result.kept_rate < 1.0); + } + + #[test] + fn test_eprintln_token_alignment() { + let base = " fn select_next_edit(&mut self, _: &NextEdit, _: &mut Window, cx: &mut Context) {\n epr\n"; + let predicted = " fn select_next_edit(&mut self, _: &NextEdit, _: &mut Window, cx: &mut Context) {\n eprintln!(\"hello world!\");\n"; + let final_text = " fn select_next_edit(&mut self, _: &NextEdit, _: &mut Window, cx: &mut Context) {\n eprintln!(\"\");\n"; + let result = compute_kept_rate(base, predicted, final_text); + assert!(result.discarded_chars > 0); + assert!(result.kept_chars > 0); + assert!(result.kept_rate > 0.0 && result.kept_rate < 1.0); + assert_eq!(result.kept_chars, 14); + assert_eq!(result.discarded_chars, 12); + } + + #[test] + fn test_annotations_rename() { + let base = " foo(old_name)\n"; + let predicted = " foo(new_name)\n"; + let final_text = " foo(new_name)\n"; + let result = compute_kept_rate(base, predicted, final_text); + + assert_eq!(result.predicted_new_chars, "new_name".len()); + assert_eq!(result.token_annotations.len(), tokenize(predicted).len()); + + for (&token, &annotation) in 
tokenize(predicted).iter().zip(&result.token_annotations) { + if token == "new_name" { + assert_eq!(annotation, TokenAnnotation::Kept); + } else { + assert_eq!(annotation, TokenAnnotation::Context); + } + } + } + + #[test] + fn test_annotations_eprintln_coloring() { + let base = " fn select_next_edit(&mut self, _: &NextEdit, _: &mut Window, cx: &mut Context) {\n epr\n"; + let predicted = " fn select_next_edit(&mut self, _: &NextEdit, _: &mut Window, cx: &mut Context) {\n eprintln!(\"hello world!\");\n"; + let final_text = " fn select_next_edit(&mut self, _: &NextEdit, _: &mut Window, cx: &mut Context) {\n eprintln!(\"\");\n"; + let result = compute_kept_rate(base, predicted, final_text); + let predicted_tokens = tokenize(predicted); + + let eprintln_index = predicted_tokens + .iter() + .position(|&token| token == "eprintln") + .expect("eprintln token not found"); + + for annotation in &result.token_annotations[..eprintln_index] { + assert_eq!(*annotation, TokenAnnotation::Context); + } + + assert_eq!( + &result.token_annotations[eprintln_index..=eprintln_index + 10], + &[ + TokenAnnotation::Kept, + TokenAnnotation::Kept, + TokenAnnotation::Kept, + TokenAnnotation::Kept, + TokenAnnotation::Discarded, + TokenAnnotation::Discarded, + TokenAnnotation::Discarded, + TokenAnnotation::Discarded, + TokenAnnotation::Kept, + TokenAnnotation::Kept, + TokenAnnotation::Kept, + ] + ); + assert_eq!( + result.token_annotations.last(), + Some(&TokenAnnotation::Context) + ); + } + + #[test] + fn test_repetitive_tokens_remain_discarded() { + let base = "foo + foo + foo + foo + foo\n".repeat(16); + let predicted = "foo + foo + prediction_token + foo + foo\n".repeat(16); + let final_text = "foo + foo + kept_token + foo + foo\n".repeat(16); + let result = compute_kept_rate(&base, &predicted, &final_text); + + assert_eq!(result.kept_chars, 0); + assert_eq!(result.discarded_chars, result.predicted_new_chars); + assert_eq!(result.predicted_new_chars, "prediction_token".len() * 16); + } +} 
diff --git a/crates/edit_prediction_cli/src/lib.rs b/crates/edit_prediction_cli/src/lib.rs new file mode 100644 index 0000000000000000000000000000000000000000..920bd942675b460c1a292cda7024ad914ba8167c --- /dev/null +++ b/crates/edit_prediction_cli/src/lib.rs @@ -0,0 +1,4 @@ +#[allow(dead_code)] +mod word_diff; + +pub mod kept_rate; diff --git a/crates/edit_prediction_cli/src/main.rs b/crates/edit_prediction_cli/src/main.rs index cf9232a04a40df507c187d53becfedcd8db03188..0f29d33947612d64b74f4fd847957ced5ad359a4 100644 --- a/crates/edit_prediction_cli/src/main.rs +++ b/crates/edit_prediction_cli/src/main.rs @@ -5,6 +5,7 @@ mod filter_languages; mod format_prompt; mod git; mod headless; +mod kept_rate; mod load_project; mod metrics; mod openai_client; diff --git a/crates/edit_prediction_cli/src/metrics.rs b/crates/edit_prediction_cli/src/metrics.rs index 8037699f4bb6f851fdadb05b435b090b911b010a..ffa26beea6eeb52a9dfdfe823ad474f9e63627a8 100644 --- a/crates/edit_prediction_cli/src/metrics.rs +++ b/crates/edit_prediction_cli/src/metrics.rs @@ -1297,3 +1297,5 @@ index abc123..def456 100644 ); } } + +pub use crate::kept_rate::compute_kept_rate; diff --git a/crates/edit_prediction_cli/src/parse_output.rs b/crates/edit_prediction_cli/src/parse_output.rs index 2b41384e176ac7a6cc5c3dc7f93ddbba3cf027ae..fc85afa371a4edfe8080d602000c38ecedb98c86 100644 --- a/crates/edit_prediction_cli/src/parse_output.rs +++ b/crates/edit_prediction_cli/src/parse_output.rs @@ -5,8 +5,7 @@ use crate::{ repair, }; use anyhow::{Context as _, Result}; -use edit_prediction::example_spec::encode_cursor_in_patch; -use zeta_prompt::{CURSOR_MARKER, ZetaFormat, parse_zeta2_model_output}; +use zeta_prompt::{ZetaFormat, parse_zeta2_model_output, parsed_output_to_patch}; pub fn run_parse_output(example: &mut Example) -> Result<()> { example @@ -65,46 +64,18 @@ fn parse_zeta2_output( .context("prompt_inputs required")?; let parsed = parse_zeta2_model_output(actual_output, format, prompt_inputs)?; - let 
range_in_excerpt = parsed.range_in_excerpt; - + let range_in_excerpt = parsed.range_in_excerpt.clone(); let excerpt = prompt_inputs.cursor_excerpt.as_ref(); - let old_text = excerpt[range_in_excerpt.clone()].to_string(); - let mut new_text = parsed.new_editable_region; - - let cursor_offset = if let Some(offset) = new_text.find(CURSOR_MARKER) { - new_text.replace_range(offset..offset + CURSOR_MARKER.len(), ""); - Some(offset) - } else { - None - }; + let editable_region_offset = range_in_excerpt.start; + let editable_region_start_line = excerpt[..editable_region_offset].matches('\n').count(); - // Normalize trailing newlines for diff generation - let mut old_text_normalized = old_text; + let mut new_text = parsed.new_editable_region.clone(); if !new_text.is_empty() && !new_text.ends_with('\n') { new_text.push('\n'); } - if !old_text_normalized.is_empty() && !old_text_normalized.ends_with('\n') { - old_text_normalized.push('\n'); - } - - let editable_region_offset = range_in_excerpt.start; - let editable_region_start_line = excerpt[..editable_region_offset].matches('\n').count(); - let editable_region_lines = old_text_normalized.lines().count() as u32; - - let diff = language::unified_diff_with_context( - &old_text_normalized, - &new_text, - editable_region_start_line as u32, - editable_region_start_line as u32, - editable_region_lines, - ); - - let formatted_diff = format!( - "--- a/{path}\n+++ b/{path}\n{diff}", - path = example.spec.cursor_path.to_string_lossy(), - ); - let formatted_diff = encode_cursor_in_patch(&formatted_diff, cursor_offset); + let cursor_offset = parsed.cursor_offset_in_new_editable_region; + let formatted_diff = parsed_output_to_patch(prompt_inputs, parsed)?; let actual_cursor = cursor_offset.map(|editable_region_cursor_offset| { ActualCursor::from_editable_region( diff --git a/crates/edit_prediction_cli/src/reversal_tracking.rs b/crates/edit_prediction_cli/src/reversal_tracking.rs index 
60661cea04beae4aba4713ac86b51fab42c91979..34ddfd5f5ec0edca2b5de64a6f033a6463dcc133 100644 --- a/crates/edit_prediction_cli/src/reversal_tracking.rs +++ b/crates/edit_prediction_cli/src/reversal_tracking.rs @@ -2,8 +2,8 @@ use std::ops::Range; use std::path::Path; use std::sync::Arc; -use edit_prediction::udiff::apply_diff_to_string; use language::{char_diff, text_diff}; +use zeta_prompt::udiff::apply_diff_to_string; use zeta_prompt::ZetaPromptInput; @@ -653,9 +653,9 @@ pub fn compute_prediction_reversal_ratio( #[cfg(test)] mod tests { use super::*; - use edit_prediction::udiff::apply_diff_to_string; use indoc::indoc; use zeta_prompt::ExcerptRanges; + use zeta_prompt::udiff::apply_diff_to_string; fn make_test_prompt_inputs( content: &str, diff --git a/crates/edit_prediction_cli/src/score.rs b/crates/edit_prediction_cli/src/score.rs index be9b185809e6e0cd49e0befbeecec0f317339342..1dace832d4998362610e860b386f4db49f965144 100644 --- a/crates/edit_prediction_cli/src/score.rs +++ b/crates/edit_prediction_cli/src/score.rs @@ -10,13 +10,13 @@ use crate::{ reversal_tracking, }; use anyhow::Context as _; -use edit_prediction::udiff::{apply_diff_to_string, apply_diff_to_string_with_hunk_offset}; use gpui::AsyncApp; use serde::Serialize; use std::fs::File; use std::io::BufWriter; use std::path::Path; use std::sync::Arc; +use zeta_prompt::udiff::{apply_diff_to_string, apply_diff_to_string_with_hunk_offset}; pub async fn run_scoring( example: &mut Example, @@ -84,6 +84,7 @@ pub async fn run_scoring( has_isolated_whitespace_changes: false, inserted_tokens: 0, deleted_tokens: 0, + kept_rate: None, cumulative_logprob: None, avg_logprob: None, }; @@ -120,12 +121,14 @@ pub async fn run_scoring( let mut best_delta_chr_f_metrics = metrics::DeltaChrFMetrics::default(); let mut best_expected_cursor: Option = None; let mut best_patch_idx: Option = None; + let mut best_expected_text: Option<&str> = None; for (idx, expected) in expected_texts.iter().enumerate() { let delta_chr_f_metrics = 
metrics::delta_chr_f(original_text, expected, &actual_text); if delta_chr_f_metrics.score > best_delta_chr_f_metrics.score { best_delta_chr_f_metrics = delta_chr_f_metrics; best_patch_idx = Some(idx); + best_expected_text = Some(expected); } } @@ -184,6 +187,10 @@ pub async fn run_scoring( prediction.actual_cursor.as_ref(), ); + let kept_rate = best_expected_text.map(|final_text| { + metrics::compute_kept_rate(original_text, &actual_text, final_text).kept_rate + }); + scores.push(ExampleScore { delta_chr_f: best_delta_chr_f_metrics.score as f32, delta_chr_f_true_positives: best_delta_chr_f_metrics.counts.true_positives, @@ -203,6 +210,7 @@ pub async fn run_scoring( has_isolated_whitespace_changes, inserted_tokens: token_changes.inserted_tokens, deleted_tokens: token_changes.deleted_tokens, + kept_rate, cumulative_logprob: prediction.cumulative_logprob, avg_logprob: prediction.avg_logprob, }); @@ -267,6 +275,8 @@ pub fn print_report(examples: &[Example], verbose: bool) { let mut wrong_editable_region_count: usize = 0; let mut wrong_editable_region_total: usize = 0; let mut isolated_whitespace_count: usize = 0; + let mut kept_rate_sum: f64 = 0.0; + let mut kept_rate_count: usize = 0; let mut patch_inserted_tokens: Vec = Vec::new(); let mut patch_deleted_tokens: Vec = Vec::new(); let mut predictions_with_patch: usize = 0; @@ -359,6 +369,12 @@ pub fn print_report(examples: &[Example], verbose: bool) { isolated_whitespace_count += 1; } + // Accumulate kept rate metrics + if let Some(kr) = score.kept_rate { + kept_rate_sum += kr; + kept_rate_count += 1; + } + // Accumulate token change metrics (only for predictions that produced a patch) let has_patch = example .predictions @@ -488,6 +504,16 @@ pub fn print_report(examples: &[Example], verbose: bool) { println!("Isolated whitespace changes: {}", isolated_ws_str); } + // Print kept rate metrics + if kept_rate_count > 0 { + let avg_kept_rate = kept_rate_sum / kept_rate_count as f64; + println!( + "Kept rate: {:.1}% avg ({} 
evaluated)", + avg_kept_rate * 100.0, + kept_rate_count + ); + } + // Print token change percentile summary (only for predictions with a patch) if !patch_inserted_tokens.is_empty() { patch_inserted_tokens.sort_unstable(); @@ -590,6 +616,8 @@ pub struct SummaryJson { #[serde(skip_serializing_if = "Option::is_none")] pub wrong_editable_region_rate: Option, pub isolated_whitespace_rate: Option, + #[serde(skip_serializing_if = "Option::is_none")] + pub avg_kept_rate: Option, } pub fn compute_summary(examples: &[Example]) -> SummaryJson { @@ -615,6 +643,8 @@ pub fn compute_summary(examples: &[Example]) -> SummaryJson { let mut wrong_editable_region_count: usize = 0; let mut wrong_editable_region_total: usize = 0; let mut isolated_whitespace_count: usize = 0; + let mut kept_rate_sum: f64 = 0.0; + let mut kept_rate_count: usize = 0; for example in examples { for (score_idx, score) in example.score.iter().enumerate() { @@ -655,6 +685,12 @@ pub fn compute_summary(examples: &[Example]) -> SummaryJson { isolated_whitespace_count += 1; } + // Accumulate kept rate metrics + if let Some(kr) = score.kept_rate { + kept_rate_sum += kr; + kept_rate_count += 1; + } + // Accumulate cursor metrics if let Some(exact_match) = score.cursor_exact_match { cursor_total += 1; @@ -729,6 +765,12 @@ pub fn compute_summary(examples: &[Example]) -> SummaryJson { None }; + let avg_kept_rate = if kept_rate_count > 0 { + Some(kept_rate_sum / kept_rate_count as f64) + } else { + None + }; + SummaryJson { total_examples: total_scores, avg_delta_chr_f, @@ -761,6 +803,7 @@ pub fn compute_summary(examples: &[Example]) -> SummaryJson { cursor_total_evaluated, wrong_editable_region_rate, isolated_whitespace_rate, + avg_kept_rate, } } diff --git a/crates/edit_prediction_ui/src/edit_prediction_context_view.rs b/crates/edit_prediction_ui/src/edit_prediction_context_view.rs index 48e74dcdcc102f9ed7844f1b8829e0182fe2c97b..1407ffc73d82c6e564fe46e688b6d6d16a307c01 100644 --- 
a/crates/edit_prediction_ui/src/edit_prediction_context_view.rs +++ b/crates/edit_prediction_ui/src/edit_prediction_context_view.rs @@ -201,10 +201,14 @@ impl EditPredictionContextView { multibuffer.clear(cx); for (path, buffer, ranges, orders, _) in paths { - let (anchor_ranges, _) = - multibuffer.set_excerpts_for_path(path, buffer, ranges, 0, cx); - for (anchor_range, order) in anchor_ranges.into_iter().zip(orders) { - excerpt_anchors_with_orders.push((anchor_range.start, order)); + multibuffer.set_excerpts_for_path(path, buffer.clone(), ranges.clone(), 0, cx); + let snapshot = multibuffer.snapshot(cx); + let buffer_snapshot = buffer.read(cx).snapshot(); + for (range, order) in ranges.into_iter().zip(orders) { + let text_anchor = buffer_snapshot.anchor_range_inside(range); + if let Some(start) = snapshot.anchor_in_buffer(text_anchor.start) { + excerpt_anchors_with_orders.push((start, order)); + } } } }); diff --git a/crates/edit_prediction_ui/src/rate_prediction_modal.rs b/crates/edit_prediction_ui/src/rate_prediction_modal.rs index 1fb6c36bc9503e0a2fea7b3f77d1515747d1363c..eb071bf955cede173e74993c93ab5cd294338474 100644 --- a/crates/edit_prediction_ui/src/rate_prediction_modal.rs +++ b/crates/edit_prediction_ui/src/rate_prediction_modal.rs @@ -357,35 +357,26 @@ impl RatePredictionsModal { }); editor.disable_header_for_buffer(new_buffer_id, cx); - let excerpt_id = editor.buffer().update(cx, |multibuffer, cx| { + editor.buffer().update(cx, |multibuffer, cx| { multibuffer.clear(cx); - multibuffer.set_excerpts_for_buffer(new_buffer, [start..end], 0, cx); + multibuffer.set_excerpts_for_buffer(new_buffer.clone(), [start..end], 0, cx); multibuffer.add_diff(diff, cx); - multibuffer.excerpt_ids().into_iter().next() }); - if let Some((excerpt_id, cursor_position)) = - excerpt_id.zip(prediction.cursor_position.as_ref()) - { + if let Some(cursor_position) = prediction.cursor_position.as_ref() { let multibuffer_snapshot = editor.buffer().read(cx).snapshot(cx); - if let 
Some(buffer_snapshot) = - multibuffer_snapshot.buffer_for_excerpt(excerpt_id) - { - let cursor_offset = prediction - .edit_preview - .anchor_to_offset_in_result(cursor_position.anchor) - + cursor_position.offset; - let cursor_anchor = buffer_snapshot.anchor_after(cursor_offset); - - if let Some(anchor) = - multibuffer_snapshot.anchor_in_excerpt(excerpt_id, cursor_anchor) - { - editor.splice_inlays( - &[InlayId::EditPrediction(0)], - vec![Inlay::edit_prediction(0, anchor, "▏")], - cx, - ); - } + let cursor_offset = prediction + .edit_preview + .anchor_to_offset_in_result(cursor_position.anchor) + + cursor_position.offset; + let cursor_anchor = new_buffer.read(cx).snapshot().anchor_after(cursor_offset); + + if let Some(anchor) = multibuffer_snapshot.anchor_in_excerpt(cursor_anchor) { + editor.splice_inlays( + &[InlayId::EditPrediction(0)], + vec![Inlay::edit_prediction(0, anchor, "▏")], + cx, + ); } } }); @@ -991,7 +982,6 @@ impl FeedbackCompletionProvider { impl editor::CompletionProvider for FeedbackCompletionProvider { fn completions( &self, - _excerpt_id: editor::ExcerptId, buffer: &Entity, buffer_position: language::Anchor, _trigger: editor::CompletionContext, diff --git a/crates/editor/src/bracket_colorization.rs b/crates/editor/src/bracket_colorization.rs index 0c9fa29ae6a19ad81ec265cc832a5d3ec15cec51..8c8c3a36e9a73a0b3960f1239f49270647dabea7 100644 --- a/crates/editor/src/bracket_colorization.rs +++ b/crates/editor/src/bracket_colorization.rs @@ -7,9 +7,9 @@ use std::ops::Range; use crate::{Editor, HighlightKey}; use collections::{HashMap, HashSet}; use gpui::{AppContext as _, Context, HighlightStyle}; -use itertools::Itertools; use language::{BufferRow, BufferSnapshot, language_settings::LanguageSettings}; -use multi_buffer::{Anchor, ExcerptId}; +use multi_buffer::{Anchor, BufferOffset, ExcerptRange, MultiBufferSnapshot}; +use text::OffsetRangeExt as _; use ui::{ActiveTheme, utils::ensure_minimum_contrast}; impl Editor { @@ -25,55 +25,49 @@ impl Editor { let 
accents_count = cx.theme().accents().0.len(); let multi_buffer_snapshot = self.buffer().read(cx).snapshot(cx); - let visible_excerpts = self.visible_excerpts(false, cx); - let excerpt_data: Vec<(ExcerptId, BufferSnapshot, Range)> = visible_excerpts + let visible_excerpts = self.visible_buffer_ranges(cx); + let excerpt_data: Vec<( + BufferSnapshot, + Range, + ExcerptRange, + )> = visible_excerpts .into_iter() - .filter_map(|(excerpt_id, (buffer, _, buffer_range))| { - let buffer = buffer.read(cx); - let buffer_snapshot = buffer.snapshot(); - if LanguageSettings::for_buffer(&buffer, cx).colorize_brackets { - Some((excerpt_id, buffer_snapshot, buffer_range)) - } else { - None - } + .filter(|(buffer_snapshot, _, _)| { + let Some(buffer) = self.buffer().read(cx).buffer(buffer_snapshot.remote_id()) + else { + return false; + }; + LanguageSettings::for_buffer(buffer.read(cx), cx).colorize_brackets }) .collect(); let mut fetched_tree_sitter_chunks = excerpt_data .iter() - .filter_map(|(excerpt_id, ..)| { + .filter_map(|(_, _, excerpt_range)| { + let key = excerpt_range.context.clone(); Some(( - *excerpt_id, - self.bracket_fetched_tree_sitter_chunks - .get(excerpt_id) - .cloned()?, + key.clone(), + self.bracket_fetched_tree_sitter_chunks.get(&key).cloned()?, )) }) - .collect::>>>(); + .collect::, HashSet>>>(); let bracket_matches_by_accent = cx.background_spawn(async move { - let anchors_in_multi_buffer = |current_excerpt: ExcerptId, - text_anchors: [text::Anchor; 4]| - -> Option<[Option<_>; 4]> { - multi_buffer_snapshot - .anchors_in_excerpt(current_excerpt, text_anchors)? 
- .collect_array() - }; - let bracket_matches_by_accent: HashMap>> = excerpt_data.into_iter().fold( HashMap::default(), - |mut acc, (excerpt_id, buffer_snapshot, buffer_range)| { - let fetched_chunks = - fetched_tree_sitter_chunks.entry(excerpt_id).or_default(); + |mut acc, (buffer_snapshot, buffer_range, excerpt_range)| { + let fetched_chunks = fetched_tree_sitter_chunks + .entry(excerpt_range.context.clone()) + .or_default(); let brackets_by_accent = compute_bracket_ranges( + &multi_buffer_snapshot, &buffer_snapshot, buffer_range, + excerpt_range, fetched_chunks, - excerpt_id, accents_count, - &anchors_in_multi_buffer, ); for (accent_number, new_ranges) in brackets_by_accent { @@ -144,15 +138,20 @@ impl Editor { } fn compute_bracket_ranges( + multi_buffer_snapshot: &MultiBufferSnapshot, buffer_snapshot: &BufferSnapshot, - buffer_range: Range, + buffer_range: Range, + excerpt_range: ExcerptRange, fetched_chunks: &mut HashSet>, - excerpt_id: ExcerptId, accents_count: usize, - anchors_in_multi_buffer: &impl Fn(ExcerptId, [text::Anchor; 4]) -> Option<[Option; 4]>, ) -> Vec<(usize, Vec>)> { + let context = excerpt_range.context.to_offset(buffer_snapshot); + buffer_snapshot - .fetch_bracket_ranges(buffer_range.start..buffer_range.end, Some(fetched_chunks)) + .fetch_bracket_ranges( + buffer_range.start.0..buffer_range.end.0, + Some(fetched_chunks), + ) .into_iter() .flat_map(|(chunk_range, pairs)| { if fetched_chunks.insert(chunk_range) { @@ -164,37 +163,25 @@ fn compute_bracket_ranges( .filter_map(|pair| { let color_index = pair.color_index?; - let buffer_open_range = buffer_snapshot.anchor_range_around(pair.open_range); - let buffer_close_range = buffer_snapshot.anchor_range_around(pair.close_range); - let [ - buffer_open_range_start, - buffer_open_range_end, - buffer_close_range_start, - buffer_close_range_end, - ] = anchors_in_multi_buffer( - excerpt_id, - [ - buffer_open_range.start, - buffer_open_range.end, - buffer_close_range.start, - buffer_close_range.end, - 
], - )?; - let multi_buffer_open_range = buffer_open_range_start.zip(buffer_open_range_end); - let multi_buffer_close_range = buffer_close_range_start.zip(buffer_close_range_end); + let mut ranges = Vec::new(); - let mut ranges = Vec::with_capacity(2); - if let Some((open_start, open_end)) = multi_buffer_open_range { - ranges.push(open_start..open_end); - } - if let Some((close_start, close_end)) = multi_buffer_close_range { - ranges.push(close_start..close_end); - } - if ranges.is_empty() { - None - } else { - Some((color_index % accents_count, ranges)) - } + if context.start <= pair.open_range.start && pair.open_range.end <= context.end { + let anchors = buffer_snapshot.anchor_range_inside(pair.open_range); + ranges.push( + multi_buffer_snapshot.anchor_in_buffer(anchors.start)? + ..multi_buffer_snapshot.anchor_in_buffer(anchors.end)?, + ); + }; + + if context.start <= pair.close_range.start && pair.close_range.end <= context.end { + let anchors = buffer_snapshot.anchor_range_inside(pair.close_range); + ranges.push( + multi_buffer_snapshot.anchor_in_buffer(anchors.start)? + ..multi_buffer_snapshot.anchor_in_buffer(anchors.end)?, + ); + }; + + Some((color_index % accents_count, ranges)) }) .collect() } @@ -1197,7 +1184,7 @@ mod foo «1{ ); } - let buffer_snapshot = snapshot.buffer().as_singleton().unwrap().2; + let buffer_snapshot = snapshot.buffer().as_singleton().unwrap(); for bracket_match in buffer_snapshot .fetch_bracket_ranges( snapshot @@ -1464,6 +1451,101 @@ mod foo «1{ ); } + #[gpui::test] + async fn test_multi_buffer_close_excerpts(cx: &mut gpui::TestAppContext) { + let comment_lines = 5; + + init_test(cx, |language_settings| { + language_settings.defaults.colorize_brackets = Some(true); + }); + let fs = FakeFs::new(cx.background_executor.clone()); + fs.insert_tree( + path!("/a"), + json!({ + "lib.rs": separate_with_comment_lines( + indoc! {r#" + fn process_data_1() { + let map: Option> = None; + } + "#}, + indoc! 
{r#" + fn process_data_2() { + let other_map: Option> = None; + } + "#}, + comment_lines, + ) + }), + ) + .await; + + let project = Project::test(fs, [path!("/a").as_ref()], cx).await; + let language_registry = project.read_with(cx, |project, _| project.languages().clone()); + language_registry.add(rust_lang()); + + let buffer_1 = project + .update(cx, |project, cx| { + project.open_local_buffer(path!("/a/lib.rs"), cx) + }) + .await + .unwrap(); + + let second_excerpt_start = buffer_1.read_with(cx, |buffer, _| { + let text = buffer.text(); + text.lines() + .enumerate() + .find(|(_, line)| line.contains("process_data_2")) + .map(|(row, _)| row as u32) + .unwrap() + }); + + let multi_buffer = cx.new(|cx| { + let mut multi_buffer = MultiBuffer::new(Capability::ReadWrite); + multi_buffer.set_excerpts_for_path( + PathKey::sorted(0), + buffer_1.clone(), + [ + Point::new(0, 0)..Point::new(3, 0), + Point::new(second_excerpt_start, 0)..Point::new(second_excerpt_start + 3, 0), + ], + 0, + cx, + ); + multi_buffer + }); + + let editor = cx.add_window(|window, cx| { + Editor::for_multibuffer(multi_buffer, Some(project.clone()), window, cx) + }); + cx.executor().advance_clock(Duration::from_millis(100)); + cx.executor().run_until_parked(); + + let editor_snapshot = editor + .update(cx, |editor, window, cx| editor.snapshot(window, cx)) + .unwrap(); + assert_eq!( + concat!( + "\n", + "\n", + "fn process_data_1\u{00ab}1()1\u{00bb} \u{00ab}1{\n", + " let map: Option\u{00ab}23\u{00bb}>2\u{00bb} = None;\n", + "}1\u{00bb}\n", + "\n", + "\n", + "fn process_data_2\u{00ab}1()1\u{00bb} \u{00ab}1{\n", + " let other_map: Option\u{00ab}23\u{00bb}>2\u{00bb} = None;\n", + "}1\u{00bb}\n", + "\n", + "1 hsla(207.80, 16.20%, 69.19%, 1.00)\n", + "2 hsla(29.00, 54.00%, 65.88%, 1.00)\n", + "3 hsla(286.00, 51.00%, 75.25%, 1.00)\n", + "4 hsla(187.00, 47.00%, 59.22%, 1.00)\n", + ), + &editor_bracket_colors_markup(&editor_snapshot), + "Two close excerpts from the same buffer (within same tree-sitter 
chunk) should both have bracket colors" + ); + } + #[gpui::test] // reproduction of #47846 async fn test_bracket_colorization_with_folds(cx: &mut gpui::TestAppContext) { diff --git a/crates/editor/src/code_completion_tests.rs b/crates/editor/src/code_completion_tests.rs index 4602824486ebb88f78ed529abb91ddcc1c34646f..3211f0b818eb3079007db4bf268e84bd53d3cbf1 100644 --- a/crates/editor/src/code_completion_tests.rs +++ b/crates/editor/src/code_completion_tests.rs @@ -7,7 +7,7 @@ use project::{Completion, CompletionSource}; use settings::SnippetSortOrder; use std::sync::Arc; use std::sync::atomic::AtomicBool; -use text::Anchor; +use text::{Anchor, BufferId}; #[gpui::test] async fn test_sort_kind(cx: &mut TestAppContext) { @@ -393,7 +393,7 @@ impl CompletionBuilder { kind: Option, ) -> Completion { Completion { - replace_range: Anchor::MIN..Anchor::MAX, + replace_range: Anchor::min_max_range_for_buffer(BufferId::new(1).unwrap()), new_text: label.to_string(), label: CodeLabel::plain(label.to_string(), filter_text), documentation: None, diff --git a/crates/editor/src/code_context_menus.rs b/crates/editor/src/code_context_menus.rs index 5d6c037d9b67034423dda9f119a1e78fb1e5b9b2..2db2086eef422a87a0825c4a4ad820d422b160e9 100644 --- a/crates/editor/src/code_context_menus.rs +++ b/crates/editor/src/code_context_menus.rs @@ -10,7 +10,7 @@ use language::CodeLabel; use language::{Buffer, LanguageName, LanguageRegistry}; use lsp::CompletionItemTag; use markdown::{CopyButtonVisibility, Markdown, MarkdownElement}; -use multi_buffer::{Anchor, ExcerptId}; +use multi_buffer::Anchor; use ordered_float::OrderedFloat; use project::lsp_store::CompletionDocumentation; use project::{CodeAction, Completion, TaskSourceKind}; @@ -357,7 +357,8 @@ impl CompletionsMenu { id: CompletionId, sort_completions: bool, choices: &Vec, - selection: Range, + initial_position: Anchor, + selection: Range, buffer: Entity, scroll_handle: Option, snippet_sort_order: SnippetSortOrder, @@ -365,7 +366,7 @@ impl 
CompletionsMenu { let completions = choices .iter() .map(|choice| Completion { - replace_range: selection.start.text_anchor..selection.end.text_anchor, + replace_range: selection.clone(), new_text: choice.to_string(), label: CodeLabel::plain(choice.to_string(), None), match_start: None, @@ -400,7 +401,7 @@ impl CompletionsMenu { id, source: CompletionsMenuSource::SnippetChoices, sort_completions, - initial_position: selection.start, + initial_position, initial_query: None, is_incomplete: false, buffer, @@ -1380,7 +1381,6 @@ impl CompletionsMenu { #[derive(Clone)] pub struct AvailableCodeAction { - pub excerpt_id: ExcerptId, pub action: CodeAction, pub provider: Rc, } @@ -1433,7 +1433,6 @@ impl CodeActionContents { }) .chain(self.actions.iter().flat_map(|actions| { actions.iter().map(|available| CodeActionsItem::CodeAction { - excerpt_id: available.excerpt_id, action: available.action.clone(), provider: available.provider.clone(), }) @@ -1457,7 +1456,6 @@ impl CodeActionContents { if let Some(actions) = &self.actions { if let Some(available) = actions.get(index) { return Some(CodeActionsItem::CodeAction { - excerpt_id: available.excerpt_id, action: available.action.clone(), provider: available.provider.clone(), }); @@ -1477,7 +1475,6 @@ impl CodeActionContents { pub enum CodeActionsItem { Task(TaskSourceKind, ResolvedTask), CodeAction { - excerpt_id: ExcerptId, action: CodeAction, provider: Rc, }, diff --git a/crates/editor/src/display_map.rs b/crates/editor/src/display_map.rs index 933f0e6e18e57c38b6bcc3636f60bd1ae671d3a6..f95f1030276015af4825119fc98ac68b876d0e5f 100644 --- a/crates/editor/src/display_map.rs +++ b/crates/editor/src/display_map.rs @@ -103,7 +103,7 @@ use language::{ }; use multi_buffer::{ - Anchor, AnchorRangeExt, ExcerptId, MultiBuffer, MultiBufferOffset, MultiBufferOffsetUtf16, + Anchor, AnchorRangeExt, MultiBuffer, MultiBufferOffset, MultiBufferOffsetUtf16, MultiBufferPoint, MultiBufferRow, MultiBufferSnapshot, RowInfo, ToOffset, ToPoint, }; use 
project::project_settings::DiagnosticSeverity; @@ -125,7 +125,7 @@ use std::{ fmt::Debug, iter, num::NonZeroU32, - ops::{self, Add, Bound, Range, Sub}, + ops::{self, Add, Range, Sub}, sync::Arc, }; @@ -195,10 +195,9 @@ pub struct CompanionExcerptPatch { } pub type ConvertMultiBufferRows = fn( - &HashMap, &MultiBufferSnapshot, &MultiBufferSnapshot, - (Bound, Bound), + Range, ) -> Vec; /// Decides how text in a [`MultiBuffer`] should be displayed in a buffer, handling inlay hints, @@ -240,8 +239,6 @@ pub(crate) struct Companion { rhs_display_map_id: EntityId, rhs_buffer_to_lhs_buffer: HashMap, lhs_buffer_to_rhs_buffer: HashMap, - rhs_excerpt_to_lhs_excerpt: HashMap, - lhs_excerpt_to_rhs_excerpt: HashMap, rhs_rows_to_lhs_rows: ConvertMultiBufferRows, lhs_rows_to_rhs_rows: ConvertMultiBufferRows, rhs_custom_block_to_balancing_block: RefCell>, @@ -258,8 +255,6 @@ impl Companion { rhs_display_map_id, rhs_buffer_to_lhs_buffer: Default::default(), lhs_buffer_to_rhs_buffer: Default::default(), - rhs_excerpt_to_lhs_excerpt: Default::default(), - lhs_excerpt_to_rhs_excerpt: Default::default(), rhs_rows_to_lhs_rows, lhs_rows_to_rhs_rows, rhs_custom_block_to_balancing_block: Default::default(), @@ -287,14 +282,14 @@ impl Companion { display_map_id: EntityId, companion_snapshot: &MultiBufferSnapshot, our_snapshot: &MultiBufferSnapshot, - bounds: (Bound, Bound), + bounds: Range, ) -> Vec { - let (excerpt_map, convert_fn) = if self.is_rhs(display_map_id) { - (&self.rhs_excerpt_to_lhs_excerpt, self.rhs_rows_to_lhs_rows) + let convert_fn = if self.is_rhs(display_map_id) { + self.rhs_rows_to_lhs_rows } else { - (&self.lhs_excerpt_to_rhs_excerpt, self.lhs_rows_to_rhs_rows) + self.lhs_rows_to_rhs_rows }; - convert_fn(excerpt_map, companion_snapshot, our_snapshot, bounds) + convert_fn(companion_snapshot, our_snapshot, bounds) } pub(crate) fn convert_point_from_companion( @@ -304,20 +299,15 @@ impl Companion { companion_snapshot: &MultiBufferSnapshot, point: MultiBufferPoint, ) -> Range 
{ - let (excerpt_map, convert_fn) = if self.is_rhs(display_map_id) { - (&self.lhs_excerpt_to_rhs_excerpt, self.lhs_rows_to_rhs_rows) + let convert_fn = if self.is_rhs(display_map_id) { + self.lhs_rows_to_rhs_rows } else { - (&self.rhs_excerpt_to_lhs_excerpt, self.rhs_rows_to_lhs_rows) + self.rhs_rows_to_lhs_rows }; - let excerpt = convert_fn( - excerpt_map, - our_snapshot, - companion_snapshot, - (Bound::Included(point), Bound::Included(point)), - ) - .into_iter() - .next(); + let excerpt = convert_fn(our_snapshot, companion_snapshot, point..point) + .into_iter() + .next(); let Some(excerpt) = excerpt else { return Point::zero()..our_snapshot.max_point(); @@ -332,20 +322,15 @@ impl Companion { companion_snapshot: &MultiBufferSnapshot, point: MultiBufferPoint, ) -> Range { - let (excerpt_map, convert_fn) = if self.is_rhs(display_map_id) { - (&self.rhs_excerpt_to_lhs_excerpt, self.rhs_rows_to_lhs_rows) + let convert_fn = if self.is_rhs(display_map_id) { + self.rhs_rows_to_lhs_rows } else { - (&self.lhs_excerpt_to_rhs_excerpt, self.lhs_rows_to_rhs_rows) + self.lhs_rows_to_rhs_rows }; - let excerpt = convert_fn( - excerpt_map, - companion_snapshot, - our_snapshot, - (Bound::Included(point), Bound::Included(point)), - ) - .into_iter() - .next(); + let excerpt = convert_fn(companion_snapshot, our_snapshot, point..point) + .into_iter() + .next(); let Some(excerpt) = excerpt else { return Point::zero()..companion_snapshot.max_point(); @@ -353,30 +338,6 @@ impl Companion { excerpt.patch.edit_for_old_position(point).new } - pub(crate) fn companion_excerpt_to_excerpt( - &self, - display_map_id: EntityId, - ) -> &HashMap { - if self.is_rhs(display_map_id) { - &self.lhs_excerpt_to_rhs_excerpt - } else { - &self.rhs_excerpt_to_lhs_excerpt - } - } - - #[cfg(test)] - pub(crate) fn excerpt_mappings( - &self, - ) -> ( - &HashMap, - &HashMap, - ) { - ( - &self.lhs_excerpt_to_rhs_excerpt, - &self.rhs_excerpt_to_lhs_excerpt, - ) - } - fn buffer_to_companion_buffer(&self, 
display_map_id: EntityId) -> &HashMap { if self.is_rhs(display_map_id) { &self.rhs_buffer_to_lhs_buffer @@ -385,24 +346,6 @@ impl Companion { } } - pub(crate) fn add_excerpt_mapping(&mut self, lhs_id: ExcerptId, rhs_id: ExcerptId) { - self.lhs_excerpt_to_rhs_excerpt.insert(lhs_id, rhs_id); - self.rhs_excerpt_to_lhs_excerpt.insert(rhs_id, lhs_id); - } - - pub(crate) fn remove_excerpt_mappings( - &mut self, - lhs_ids: impl IntoIterator, - rhs_ids: impl IntoIterator, - ) { - for id in lhs_ids { - self.lhs_excerpt_to_rhs_excerpt.remove(&id); - } - for id in rhs_ids { - self.rhs_excerpt_to_lhs_excerpt.remove(&id); - } - } - pub(crate) fn lhs_to_rhs_buffer(&self, lhs_buffer_id: BufferId) -> Option { self.lhs_buffer_to_rhs_buffer.get(&lhs_buffer_id).copied() } @@ -457,10 +400,13 @@ impl DisplayMap { diagnostics_max_severity: DiagnosticSeverity, cx: &mut Context, ) -> Self { - let buffer_subscription = buffer.update(cx, |buffer, _| buffer.subscribe()); - let tab_size = Self::tab_size(&buffer, cx); + // Important: obtain the snapshot BEFORE creating the subscription. + // snapshot() may call sync() which publishes edits. If we subscribe first, + // those edits would be captured but the InlayMap would already be at the + // post-edit state, causing a desync. 
let buffer_snapshot = buffer.read(cx).snapshot(cx); + let buffer_subscription = buffer.update(cx, |buffer, _| buffer.subscribe()); let crease_map = CreaseMap::new(&buffer_snapshot); let (inlay_map, snapshot) = InlayMap::new(buffer_snapshot); let (fold_map, snapshot) = FoldMap::new(snapshot); @@ -540,8 +486,7 @@ impl DisplayMap { .wrap_map .update(cx, |wrap_map, cx| wrap_map.sync(snapshot, edits, cx)); - let (snapshot, edits) = - writer.unfold_intersecting([Anchor::min()..Anchor::max()], true); + let (snapshot, edits) = writer.unfold_intersecting([Anchor::Min..Anchor::Max], true); let (snapshot, edits) = self.tab_map.sync(snapshot, edits, tab_size); let (snapshot, _edits) = self .wrap_map @@ -632,18 +577,6 @@ impl DisplayMap { self.companion.as_ref().map(|(_, c)| c) } - pub(crate) fn companion_excerpt_to_my_excerpt( - &self, - their_id: ExcerptId, - cx: &App, - ) -> Option { - let (_, companion) = self.companion.as_ref()?; - let c = companion.read(cx); - c.companion_excerpt_to_excerpt(self.entity_id) - .get(&their_id) - .copied() - } - fn sync_through_wrap(&mut self, cx: &mut App) -> (WrapSnapshot, WrapPatch) { let tab_size = Self::tab_size(&self.buffer, cx); let buffer_snapshot = self.buffer.read(cx).snapshot(cx); @@ -1054,17 +987,10 @@ impl DisplayMap { return; } - let excerpt_ids = snapshot - .excerpts() - .filter(|(_, buf, _)| buf.remote_id() == buffer_id) - .map(|(id, _, _)| id) - .collect::>(); - let base_placeholder = self.fold_placeholder.clone(); let creases = ranges.into_iter().filter_map(|folding_range| { - let mb_range = excerpt_ids.iter().find_map(|&id| { - snapshot.anchor_range_in_excerpt(id, folding_range.range.clone()) - })?; + let mb_range = + snapshot.buffer_anchor_range_to_anchor_range(folding_range.range.clone())?; let placeholder = if let Some(collapsed_text) = folding_range.collapsed_text { FoldPlaceholder { render: Arc::new({ @@ -4156,4 +4082,64 @@ pub mod tests { chunks, ); } + + /// Regression test: Creating a DisplayMap when the MultiBuffer 
has pending + /// unsynced changes should not cause a desync between the subscription edits + /// and the InlayMap's buffer state. + /// + /// The bug occurred because: + /// 1. DisplayMap::new created a subscription first + /// 2. Then called snapshot() which synced and published edits + /// 3. InlayMap was created with the post-sync snapshot + /// 4. But the subscription captured the sync edits, leading to double-application + #[gpui::test] + fn test_display_map_subscription_ordering(cx: &mut gpui::App) { + init_test(cx, &|_| {}); + + // Create a buffer with some initial text + let buffer = cx.new(|cx| Buffer::local("initial", cx)); + let multibuffer = cx.new(|cx| MultiBuffer::singleton(buffer.clone(), cx)); + + // Edit the buffer. This sets buffer_changed_since_sync = true. + // Importantly, do NOT call multibuffer.snapshot() yet. + buffer.update(cx, |buffer, cx| { + buffer.edit([(0..0, "prefix ")], None, cx); + }); + + // Create the DisplayMap. In the buggy code, this would: + // 1. Create subscription (empty) + // 2. Call snapshot() which syncs and publishes edits E1 + // 3. Create InlayMap with post-E1 snapshot + // 4. 
Subscription now has E1, but InlayMap is already at post-E1 state + let map = cx.new(|cx| { + DisplayMap::new( + multibuffer.clone(), + font("Helvetica"), + px(14.0), + None, + 1, + 1, + FoldPlaceholder::test(), + DiagnosticSeverity::Warning, + cx, + ) + }); + + // Verify initial state is correct + let snapshot = map.update(cx, |map, cx| map.snapshot(cx)); + assert_eq!(snapshot.text(), "prefix initial"); + + // Make another edit + buffer.update(cx, |buffer, cx| { + buffer.edit([(7..7, "more ")], None, cx); + }); + + // This would crash in the buggy code because: + // - InlayMap expects edits from V1 to V2 + // - But subscription has E1 ∘ E2 (from V0 to V2) + // - The calculation `buffer_edit.new.end + (cursor.end().0 - buffer_edit.old.end)` + // would produce an offset exceeding the buffer length + let snapshot = map.update(cx, |map, cx| map.snapshot(cx)); + assert_eq!(snapshot.text(), "prefix more initial"); + } } diff --git a/crates/editor/src/display_map/block_map.rs b/crates/editor/src/display_map/block_map.rs index 531de6da49e375a4f7ba2833106e1716de551ff2..67318e3300e73085fe40c2e22edfcd06778902c8 100644 --- a/crates/editor/src/display_map/block_map.rs +++ b/crates/editor/src/display_map/block_map.rs @@ -11,8 +11,8 @@ use collections::{Bound, HashMap, HashSet}; use gpui::{AnyElement, App, EntityId, Pixels, Window}; use language::{Patch, Point}; use multi_buffer::{ - Anchor, ExcerptId, ExcerptInfo, MultiBuffer, MultiBufferOffset, MultiBufferPoint, - MultiBufferRow, MultiBufferSnapshot, RowInfo, ToOffset, ToPoint as _, + Anchor, ExcerptBoundaryInfo, MultiBuffer, MultiBufferOffset, MultiBufferPoint, MultiBufferRow, + MultiBufferSnapshot, RowInfo, ToOffset, ToPoint as _, }; use parking_lot::Mutex; use std::{ @@ -298,10 +298,10 @@ pub struct BlockContext<'a, 'b> { pub indent_guide_padding: Pixels, } -#[derive(Debug, Clone, Copy, PartialEq, Eq, Ord, PartialOrd, Hash)] +#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)] pub enum BlockId { - ExcerptBoundary(ExcerptId), 
- FoldedBuffer(ExcerptId), + ExcerptBoundary(Anchor), + FoldedBuffer(BufferId), Custom(CustomBlockId), Spacer(SpacerId), } @@ -310,10 +310,8 @@ impl From for ElementId { fn from(value: BlockId) -> Self { match value { BlockId::Custom(CustomBlockId(id)) => ("Block", id).into(), - BlockId::ExcerptBoundary(excerpt_id) => { - ("ExcerptBoundary", EntityId::from(excerpt_id)).into() - } - BlockId::FoldedBuffer(id) => ("FoldedBuffer", EntityId::from(id)).into(), + BlockId::ExcerptBoundary(anchor) => anchor.opaque_id().unwrap().into(), + BlockId::FoldedBuffer(id) => ("FoldedBuffer", EntityId::from(id.to_proto())).into(), BlockId::Spacer(SpacerId(id)) => ("Spacer", id).into(), } } @@ -323,7 +321,7 @@ impl std::fmt::Display for BlockId { fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { match self { Self::Custom(id) => write!(f, "Block({id:?})"), - Self::ExcerptBoundary(id) => write!(f, "ExcerptHeader({id:?})"), + Self::ExcerptBoundary(id) => write!(f, "ExcerptBoundary({id:?})"), Self::FoldedBuffer(id) => write!(f, "FoldedBuffer({id:?})"), Self::Spacer(id) => write!(f, "Spacer({id:?})"), } @@ -340,15 +338,15 @@ struct Transform { pub enum Block { Custom(Arc), FoldedBuffer { - first_excerpt: ExcerptInfo, + first_excerpt: ExcerptBoundaryInfo, height: u32, }, ExcerptBoundary { - excerpt: ExcerptInfo, + excerpt: ExcerptBoundaryInfo, height: u32, }, BufferHeader { - excerpt: ExcerptInfo, + excerpt: ExcerptBoundaryInfo, height: u32, }, Spacer { @@ -365,12 +363,14 @@ impl Block { Block::ExcerptBoundary { excerpt: next_excerpt, .. - } => BlockId::ExcerptBoundary(next_excerpt.id), - Block::FoldedBuffer { first_excerpt, .. } => BlockId::FoldedBuffer(first_excerpt.id), + } => BlockId::ExcerptBoundary(next_excerpt.start_anchor), + Block::FoldedBuffer { first_excerpt, .. } => { + BlockId::FoldedBuffer(first_excerpt.buffer_id()) + } Block::BufferHeader { excerpt: next_excerpt, .. 
- } => BlockId::ExcerptBoundary(next_excerpt.id), + } => BlockId::ExcerptBoundary(next_excerpt.start_anchor), Block::Spacer { id, .. } => BlockId::Spacer(*id), } } @@ -1174,10 +1174,10 @@ impl BlockMap { let wrap_row = wrap_row_for(Point::new(excerpt_boundary.row.0, 0), Bias::Left); let new_buffer_id = match (&excerpt_boundary.prev, &excerpt_boundary.next) { - (None, next) => Some(next.buffer_id), + (None, next) => Some(next.buffer_id()), (Some(prev), next) => { - if prev.buffer_id != next.buffer_id { - Some(next.buffer_id) + if prev.buffer_id() != next.buffer_id() { + Some(next.buffer_id()) } else { None } @@ -1195,7 +1195,7 @@ impl BlockMap { let mut last_excerpt_end_row = first_excerpt.end_row; while let Some(next_boundary) = boundaries.peek() { - if next_boundary.next.buffer_id == new_buffer_id { + if next_boundary.next.buffer_id() == new_buffer_id { last_excerpt_end_row = next_boundary.next.end_row; } else { break; @@ -1254,12 +1254,24 @@ impl BlockMap { let our_buffer = wrap_snapshot.buffer_snapshot(); let companion_buffer = companion_snapshot.buffer_snapshot(); - let patches = companion.convert_rows_to_companion( + let range = match bounds { + (Bound::Included(start), Bound::Excluded(end)) => start..end, + (Bound::Included(start), Bound::Unbounded) => start..wrap_snapshot.buffer().max_point(), + _ => unreachable!(), + }; + let mut patches = companion.convert_rows_to_companion( display_map_id, companion_buffer, our_buffer, - bounds, + range, ); + if let Some(patch) = patches.last() + && let Bound::Excluded(end) = bounds.1 + && end == wrap_snapshot.buffer().max_point() + && patch.source_excerpt_range.is_empty() + { + patches.pop(); + } let mut our_inlay_point_cursor = wrap_snapshot.inlay_point_cursor(); let mut our_fold_point_cursor = wrap_snapshot.fold_point_cursor(); @@ -1391,18 +1403,15 @@ impl BlockMap { } } - // Main loop: process one hunk/group at a time, possibly inserting spacers before and after. 
while let Some(source_point) = source_points.next() { let mut current_boundary = source_point; let current_range = excerpt.patch.edit_for_old_position(current_boundary).new; - // This can only occur at the end of an excerpt. if current_boundary.column > 0 { debug_assert_eq!(current_boundary, excerpt.source_excerpt_range.end); break; } - // Align the two sides at the start of this group. let (delta_at_start, mut spacer_at_start) = determine_spacer( &mut our_wrapper, &mut companion_wrapper, @@ -1434,7 +1443,6 @@ impl BlockMap { source_points.next(); } - // This can only occur at the end of an excerpt. if current_boundary.column > 0 { debug_assert_eq!(current_boundary, excerpt.source_excerpt_range.end); break; @@ -1538,7 +1546,8 @@ impl BlockMap { | Block::BufferHeader { excerpt: excerpt_b, .. }, - ) => Some(excerpt_a.id).cmp(&Some(excerpt_b.id)), + ) => Some(excerpt_a.start_text_anchor().opaque_id()) + .cmp(&Some(excerpt_b.start_text_anchor().opaque_id())), ( Block::ExcerptBoundary { .. } | Block::BufferHeader { .. }, Block::Spacer { .. 
} | Block::Custom(_), @@ -2034,6 +2043,7 @@ impl BlockMapWriter<'_> { multi_buffer: &MultiBuffer, cx: &App, ) { + let multi_buffer_snapshot = multi_buffer.snapshot(cx); let mut ranges = Vec::new(); let mut companion_buffer_ids = HashSet::default(); for buffer_id in buffer_ids { @@ -2042,7 +2052,7 @@ impl BlockMapWriter<'_> { } else { self.block_map.folded_buffers.remove(&buffer_id); } - ranges.extend(multi_buffer.excerpt_ranges_for_buffer(buffer_id, cx)); + ranges.extend(multi_buffer_snapshot.range_for_buffer(buffer_id)); if let Some(companion) = &self.companion && companion.inverse.is_some() { @@ -2268,14 +2278,16 @@ impl BlockSnapshot { let custom_block = self.custom_blocks_by_id.get(&custom_block_id)?; return Some(Block::Custom(custom_block.clone())); } - BlockId::ExcerptBoundary(next_excerpt_id) => { - let excerpt_range = buffer.range_for_excerpt(next_excerpt_id)?; - self.wrap_snapshot - .make_wrap_point(excerpt_range.start, Bias::Left) + BlockId::ExcerptBoundary(start_anchor) => { + let start_point = start_anchor.to_point(&buffer); + self.wrap_snapshot.make_wrap_point(start_point, Bias::Left) } - BlockId::FoldedBuffer(excerpt_id) => self - .wrap_snapshot - .make_wrap_point(buffer.range_for_excerpt(excerpt_id)?.start, Bias::Left), + BlockId::FoldedBuffer(buffer_id) => self.wrap_snapshot.make_wrap_point( + buffer + .anchor_in_excerpt(buffer.excerpts_for_buffer(buffer_id).next()?.context.start)? 
+ .to_point(buffer), + Bias::Left, + ), BlockId::Spacer(_) => return None, }; let wrap_row = wrap_point.row(); @@ -2571,7 +2583,7 @@ impl BlockChunks<'_> { } pub struct StickyHeaderExcerpt<'a> { - pub excerpt: &'a ExcerptInfo, + pub excerpt: &'a ExcerptBoundaryInfo, } impl<'a> Iterator for BlockChunks<'a> { @@ -3096,7 +3108,13 @@ mod tests { ); multi_buffer }); - let excerpt_ids = multi_buffer.read_with(cx, |mb, _| mb.excerpt_ids()); + let excerpt_start_anchors = multi_buffer.read_with(cx, |mb, _| { + let snapshot = mb.snapshot(cx); + snapshot + .excerpts() + .map(|e| snapshot.anchor_in_excerpt(e.context.start).unwrap()) + .collect::>() + }); let font = test_font(); let font_size = px(14.); @@ -3129,9 +3147,9 @@ mod tests { assert_eq!( blocks, vec![ - (0..1, BlockId::ExcerptBoundary(excerpt_ids[0])), // path, header - (3..4, BlockId::ExcerptBoundary(excerpt_ids[1])), // path, header - (6..7, BlockId::ExcerptBoundary(excerpt_ids[2])), // path, header + (0..1, BlockId::ExcerptBoundary(excerpt_start_anchors[0])), // path, header + (3..4, BlockId::ExcerptBoundary(excerpt_start_anchors[1])), // path, header + (6..7, BlockId::ExcerptBoundary(excerpt_start_anchors[2])), // path, header ] ); } @@ -3447,13 +3465,13 @@ mod tests { ], cx, ); - assert_eq!(multibuffer.read(cx).excerpt_ids().len(), 6); + assert_eq!(multibuffer.read(cx).snapshot(cx).excerpts().count(), 6); multibuffer }); let buffer_snapshot = cx.update(|cx| buffer.read(cx).snapshot(cx)); let buffer_ids = buffer_snapshot .excerpts() - .map(|(_, buffer_snapshot, _)| buffer_snapshot.remote_id()) + .map(|excerpt| excerpt.context.start.buffer_id) .dedup() .collect::>(); assert_eq!(buffer_ids.len(), 3); @@ -3800,7 +3818,7 @@ mod tests { let buffer_snapshot = cx.update(|cx| buffer.read(cx).snapshot(cx)); let buffer_ids = buffer_snapshot .excerpts() - .map(|(_, buffer_snapshot, _)| buffer_snapshot.remote_id()) + .map(|excerpt| excerpt.context.start.buffer_id) .dedup() .collect::>(); assert_eq!(buffer_ids.len(), 1); @@ 
-4008,17 +4026,16 @@ mod tests { wrap_map.sync(tab_snapshot, tab_edits, cx) }); let mut block_map = block_map.write(wraps_snapshot, wrap_edits, None); - let (unfolded_buffers, folded_buffers) = buffer.read_with(cx, |buffer, _| { - let folded_buffers: Vec<_> = - block_map.block_map.folded_buffers.iter().cloned().collect(); - let mut unfolded_buffers = buffer.excerpt_buffer_ids(); - unfolded_buffers.dedup(); - log::debug!("All buffers {unfolded_buffers:?}"); - log::debug!("Folded buffers {folded_buffers:?}"); - unfolded_buffers.retain(|buffer_id| { - !block_map.block_map.folded_buffers.contains(buffer_id) - }); - (unfolded_buffers, folded_buffers) + let folded_buffers: Vec<_> = + block_map.block_map.folded_buffers.iter().cloned().collect(); + let mut unfolded_buffers = buffer_snapshot + .buffer_ids_for_range(Anchor::Min..Anchor::Max) + .collect::>(); + unfolded_buffers.dedup(); + log::debug!("All buffers {unfolded_buffers:?}"); + log::debug!("Folded buffers {folded_buffers:?}"); + unfolded_buffers.retain(|buffer_id| { + !block_map.block_map.folded_buffers.contains(buffer_id) }); let mut folded_count = folded_buffers.len(); let mut unfolded_count = unfolded_buffers.len(); @@ -4039,12 +4056,14 @@ mod tests { log::info!("Folding {buffer_to_fold:?}"); let related_excerpts = buffer_snapshot .excerpts() - .filter_map(|(excerpt_id, buffer, range)| { - if buffer.remote_id() == buffer_to_fold { + .filter_map(|excerpt| { + if excerpt.context.start.buffer_id == buffer_to_fold { Some(( - excerpt_id, - buffer - .text_for_range(range.context) + excerpt.context.start, + buffer_snapshot + .buffer_for_id(buffer_to_fold) + .unwrap() + .text_for_range(excerpt.context) .collect::(), )) } else { @@ -4518,7 +4537,7 @@ mod tests { let buffer_snapshot = cx.update(|cx| buffer.read(cx).snapshot(cx)); let buffer_ids = buffer_snapshot .excerpts() - .map(|(_, buffer_snapshot, _)| buffer_snapshot.remote_id()) + .map(|excerpt| excerpt.context.start.buffer_id) .dedup() .collect::>(); 
assert_eq!(buffer_ids.len(), 1); @@ -4563,7 +4582,7 @@ mod tests { let buffer_snapshot = cx.update(|cx| buffer.read(cx).snapshot(cx)); let buffer_ids = buffer_snapshot .excerpts() - .map(|(_, buffer_snapshot, _)| buffer_snapshot.remote_id()) + .map(|excerpt| excerpt.context.start.buffer_id) .dedup() .collect::>(); assert_eq!(buffer_ids.len(), 1); @@ -4635,11 +4654,6 @@ mod tests { let subscription = rhs_multibuffer.update(cx, |rhs_multibuffer, _| rhs_multibuffer.subscribe()); - let lhs_excerpt_id = - lhs_multibuffer.read_with(cx, |mb, cx| mb.snapshot(cx).excerpts().next().unwrap().0); - let rhs_excerpt_id = - rhs_multibuffer.read_with(cx, |mb, cx| mb.snapshot(cx).excerpts().next().unwrap().0); - let lhs_buffer_snapshot = cx.update(|cx| lhs_multibuffer.read(cx).snapshot(cx)); let (mut _lhs_inlay_map, lhs_inlay_snapshot) = InlayMap::new(lhs_buffer_snapshot); let (mut _lhs_fold_map, lhs_fold_snapshot) = FoldMap::new(lhs_inlay_snapshot); @@ -4661,13 +4675,11 @@ mod tests { let rhs_entity_id = rhs_multibuffer.entity_id(); let companion = cx.new(|_| { - let mut c = Companion::new( + Companion::new( rhs_entity_id, convert_rhs_rows_to_lhs, convert_lhs_rows_to_rhs, - ); - c.add_excerpt_mapping(lhs_excerpt_id, rhs_excerpt_id); - c + ) }); let rhs_edits = Patch::new(vec![text::Edit { diff --git a/crates/editor/src/display_map/crease_map.rs b/crates/editor/src/display_map/crease_map.rs index 7c81562b7448bdb53bd0dd641eada92dff527aac..1664012b5eb43fb82c7c0fce38844d98ab0f7226 100644 --- a/crates/editor/src/display_map/crease_map.rs +++ b/crates/editor/src/display_map/crease_map.rs @@ -363,7 +363,7 @@ pub struct ItemSummary { impl Default for ItemSummary { fn default() -> Self { Self { - range: Anchor::min()..Anchor::min(), + range: Anchor::Min..Anchor::Min, } } } diff --git a/crates/editor/src/display_map/fold_map.rs b/crates/editor/src/display_map/fold_map.rs index 95479e297cb82adcf8c3eb1f73e95f8b557eef43..1554bb96dab0e2f76a17df1396bd945f332af208 100644 --- 
a/crates/editor/src/display_map/fold_map.rs +++ b/crates/editor/src/display_map/fold_map.rs @@ -185,16 +185,18 @@ impl FoldMapWriter<'_> { continue; } + let fold_range = buffer.anchor_after(range.start)..buffer.anchor_before(range.end); // For now, ignore any ranges that span an excerpt boundary. - let fold_range = - FoldRange(buffer.anchor_after(range.start)..buffer.anchor_before(range.end)); - if fold_range.0.start.excerpt_id != fold_range.0.end.excerpt_id { + if buffer + .anchor_range_to_buffer_anchor_range(fold_range.clone()) + .is_none() + { continue; } folds.push(Fold { id: FoldId(post_inc(&mut self.0.next_fold_id.0)), - range: fold_range, + range: FoldRange(fold_range), placeholder: fold_text, }); @@ -510,7 +512,7 @@ impl FoldMap { .snapshot .folds .cursor::(&inlay_snapshot.buffer); - folds_cursor.seek(&FoldRange(anchor..Anchor::max()), Bias::Left); + folds_cursor.seek(&FoldRange(anchor..Anchor::Max), Bias::Left); let mut folds = iter::from_fn({ let inlay_snapshot = &inlay_snapshot; @@ -1226,7 +1228,7 @@ impl DerefMut for FoldRange { impl Default for FoldRange { fn default() -> Self { - Self(Anchor::min()..Anchor::max()) + Self(Anchor::Min..Anchor::Max) } } @@ -1262,10 +1264,10 @@ pub struct FoldSummary { impl Default for FoldSummary { fn default() -> Self { Self { - start: Anchor::min(), - end: Anchor::max(), - min_start: Anchor::max(), - max_end: Anchor::min(), + start: Anchor::Min, + end: Anchor::Max, + min_start: Anchor::Max, + max_end: Anchor::Min, count: 0, } } diff --git a/crates/editor/src/display_map/inlay_map.rs b/crates/editor/src/display_map/inlay_map.rs index 9c05a182ef56eb803ff545a1c9d3914b505767aa..47ca295ccb1a08768ce129b92d10506294a9cf78 100644 --- a/crates/editor/src/display_map/inlay_map.rs +++ b/crates/editor/src/display_map/inlay_map.rs @@ -1342,7 +1342,7 @@ mod tests { use settings::SettingsStore; use std::{cmp::Reverse, env, sync::Arc}; use sum_tree::TreeMap; - use text::{Patch, Rope}; + use text::{BufferId, Patch, Rope}; use 
util::RandomCharIter; use util::post_inc; @@ -1351,10 +1351,10 @@ mod tests { assert_eq!( Inlay::hint( InlayId::Hint(0), - Anchor::min(), + Anchor::Min, &InlayHint { label: InlayHintLabel::String("a".to_string()), - position: text::Anchor::MIN, + position: text::Anchor::min_for_buffer(BufferId::new(1).unwrap()), padding_left: false, padding_right: false, tooltip: None, @@ -1371,10 +1371,10 @@ mod tests { assert_eq!( Inlay::hint( InlayId::Hint(0), - Anchor::min(), + Anchor::Min, &InlayHint { label: InlayHintLabel::String("a".to_string()), - position: text::Anchor::MIN, + position: text::Anchor::min_for_buffer(BufferId::new(1).unwrap()), padding_left: true, padding_right: true, tooltip: None, @@ -1391,10 +1391,10 @@ mod tests { assert_eq!( Inlay::hint( InlayId::Hint(0), - Anchor::min(), + Anchor::Min, &InlayHint { label: InlayHintLabel::String(" a ".to_string()), - position: text::Anchor::MIN, + position: text::Anchor::min_for_buffer(BufferId::new(1).unwrap()), padding_left: false, padding_right: false, tooltip: None, @@ -1411,10 +1411,10 @@ mod tests { assert_eq!( Inlay::hint( InlayId::Hint(0), - Anchor::min(), + Anchor::Min, &InlayHint { label: InlayHintLabel::String(" a ".to_string()), - position: text::Anchor::MIN, + position: text::Anchor::min_for_buffer(BufferId::new(1).unwrap()), padding_left: true, padding_right: true, tooltip: None, @@ -1434,10 +1434,10 @@ mod tests { assert_eq!( Inlay::hint( InlayId::Hint(0), - Anchor::min(), + Anchor::Min, &InlayHint { label: InlayHintLabel::String("🎨".to_string()), - position: text::Anchor::MIN, + position: text::Anchor::min_for_buffer(BufferId::new(1).unwrap()), padding_left: true, padding_right: true, tooltip: None, diff --git a/crates/editor/src/document_colors.rs b/crates/editor/src/document_colors.rs index a38a0527f0641ef2d622b2f33fa1e932080ad7b5..8f8b70128ffc2bb66b2147baaa53d77e40c03c25 100644 --- a/crates/editor/src/document_colors.rs +++ b/crates/editor/src/document_colors.rs @@ -8,7 +8,7 @@ use 
language::point_from_lsp; use multi_buffer::Anchor; use project::{DocumentColor, InlayId}; use settings::Settings as _; -use text::{Bias, BufferId, OffsetRangeExt as _}; +use text::{Bias, BufferId}; use ui::{App, Context, Window}; use util::post_inc; @@ -160,9 +160,9 @@ impl Editor { } let buffers_to_query = self - .visible_excerpts(true, cx) - .into_values() - .map(|(buffer, ..)| buffer) + .visible_buffers(cx) + .into_iter() + .filter(|buffer| self.is_lsp_relevant(buffer.read(cx).file(), cx)) .chain(buffer_id.and_then(|buffer_id| self.buffer.read(cx).buffer(buffer_id))) .filter(|editor_buffer| { let editor_buffer_id = editor_buffer.read(cx).remote_id(); @@ -184,9 +184,9 @@ impl Editor { buffers_to_query .into_iter() .filter_map(|buffer| { - let buffer_id = buffer.read(cx).remote_id(); + let buffer_snapshot = buffer.read(cx).snapshot(); let colors_task = lsp_store.document_colors(buffer, cx)?; - Some(async move { (buffer_id, colors_task.await) }) + Some(async move { (buffer_snapshot, colors_task.await) }) }) .collect::>() }) @@ -200,40 +200,21 @@ impl Editor { if all_colors.is_empty() { return; } - let Ok((multi_buffer_snapshot, editor_excerpts)) = editor.update(cx, |editor, cx| { - let multi_buffer_snapshot = editor.buffer().read(cx).snapshot(cx); - let editor_excerpts = multi_buffer_snapshot.excerpts().fold( - HashMap::default(), - |mut acc, (excerpt_id, buffer_snapshot, excerpt_range)| { - let excerpt_data = acc - .entry(buffer_snapshot.remote_id()) - .or_insert_with(Vec::new); - let excerpt_point_range = - excerpt_range.context.to_point_utf16(buffer_snapshot); - excerpt_data.push(( - excerpt_id, - buffer_snapshot.clone(), - excerpt_point_range, - )); - acc - }, - ); - (multi_buffer_snapshot, editor_excerpts) - }) else { + let Some(multi_buffer_snapshot) = editor + .update(cx, |editor, cx| editor.buffer.read(cx).snapshot(cx)) + .ok() + else { return; }; let mut new_editor_colors: HashMap, DocumentColor)>> = HashMap::default(); - for (buffer_id, colors) in 
all_colors { - let Some(excerpts) = editor_excerpts.get(&buffer_id) else { - continue; - }; + for (buffer_snapshot, colors) in all_colors { match colors { Ok(colors) => { if colors.colors.is_empty() { new_editor_colors - .entry(buffer_id) + .entry(buffer_snapshot.remote_id()) .or_insert_with(Vec::new) .clear(); } else { @@ -241,41 +222,33 @@ impl Editor { let color_start = point_from_lsp(color.lsp_range.start); let color_end = point_from_lsp(color.lsp_range.end); - for (excerpt_id, buffer_snapshot, excerpt_range) in excerpts { - if !excerpt_range.contains(&color_start.0) - || !excerpt_range.contains(&color_end.0) - { - continue; - } - let start = buffer_snapshot.anchor_before( - buffer_snapshot.clip_point_utf16(color_start, Bias::Left), - ); - let end = buffer_snapshot.anchor_after( - buffer_snapshot.clip_point_utf16(color_end, Bias::Right), - ); - let Some(range) = multi_buffer_snapshot - .anchor_range_in_excerpt(*excerpt_id, start..end) - else { - continue; - }; - - let new_buffer_colors = - new_editor_colors.entry(buffer_id).or_insert_with(Vec::new); - - let (Ok(i) | Err(i)) = - new_buffer_colors.binary_search_by(|(probe, _)| { - probe - .start - .cmp(&range.start, &multi_buffer_snapshot) - .then_with(|| { - probe - .end - .cmp(&range.end, &multi_buffer_snapshot) - }) - }); - new_buffer_colors.insert(i, (range, color)); - break; - } + let Some(range) = multi_buffer_snapshot + .buffer_anchor_range_to_anchor_range( + buffer_snapshot.anchor_range_outside( + buffer_snapshot + .clip_point_utf16(color_start, Bias::Left) + ..buffer_snapshot + .clip_point_utf16(color_end, Bias::Right), + ), + ) + else { + continue; + }; + + let new_buffer_colors = new_editor_colors + .entry(buffer_snapshot.remote_id()) + .or_insert_with(Vec::new); + + let (Ok(i) | Err(i)) = + new_buffer_colors.binary_search_by(|(probe, _)| { + probe + .start + .cmp(&range.start, &multi_buffer_snapshot) + .then_with(|| { + probe.end.cmp(&range.end, &multi_buffer_snapshot) + }) + }); + 
new_buffer_colors.insert(i, (range, color)); } } } diff --git a/crates/editor/src/document_symbols.rs b/crates/editor/src/document_symbols.rs index 0668a034c8755a8702e31ec3a060b7f3b79c6829..ef9159788a7a5c2b2c317015219090fdae6a4944 100644 --- a/crates/editor/src/document_symbols.rs +++ b/crates/editor/src/document_symbols.rs @@ -62,10 +62,10 @@ impl Editor { multi_buffer_snapshot: &MultiBufferSnapshot, cx: &Context, ) -> bool { - let Some(excerpt) = multi_buffer_snapshot.excerpt_containing(cursor..cursor) else { + let Some((anchor, _)) = multi_buffer_snapshot.anchor_to_buffer_anchor(cursor) else { return false; }; - let Some(buffer) = self.buffer.read(cx).buffer(excerpt.buffer_id()) else { + let Some(buffer) = self.buffer.read(cx).buffer(anchor.buffer_id) else { return false; }; lsp_symbols_enabled(buffer.read(cx), cx) @@ -77,19 +77,12 @@ impl Editor { &self, cursor: Anchor, multi_buffer_snapshot: &MultiBufferSnapshot, - cx: &Context, + _cx: &Context, ) -> Option<(BufferId, Vec>)> { - let excerpt = multi_buffer_snapshot.excerpt_containing(cursor..cursor)?; - let excerpt_id = excerpt.id(); - let buffer_id = excerpt.buffer_id(); - if Some(buffer_id) != cursor.text_anchor.buffer_id { - return None; - } - let buffer = self.buffer.read(cx).buffer(buffer_id)?; - let buffer_snapshot = buffer.read(cx).snapshot(); - let cursor_text_anchor = cursor.text_anchor; - - let all_items = self.lsp_document_symbols.get(&buffer_id)?; + let (cursor_text_anchor, buffer) = multi_buffer_snapshot.anchor_to_buffer_anchor(cursor)?; + let all_items = self + .lsp_document_symbols + .get(&cursor_text_anchor.buffer_id)?; if all_items.is_empty() { return None; } @@ -97,34 +90,36 @@ impl Editor { let mut symbols = all_items .iter() .filter(|item| { - item.range - .start - .cmp(&cursor_text_anchor, &buffer_snapshot) - .is_le() - && item - .range - .end - .cmp(&cursor_text_anchor, &buffer_snapshot) - .is_ge() + item.range.start.cmp(&cursor_text_anchor, buffer).is_le() + && 
item.range.end.cmp(&cursor_text_anchor, buffer).is_ge() }) - .map(|item| OutlineItem { - depth: item.depth, - range: Anchor::range_in_buffer(excerpt_id, item.range.clone()), - source_range_for_text: Anchor::range_in_buffer( - excerpt_id, - item.source_range_for_text.clone(), - ), - text: item.text.clone(), - highlight_ranges: item.highlight_ranges.clone(), - name_ranges: item.name_ranges.clone(), - body_range: item - .body_range - .as_ref() - .map(|r| Anchor::range_in_buffer(excerpt_id, r.clone())), - annotation_range: item - .annotation_range - .as_ref() - .map(|r| Anchor::range_in_buffer(excerpt_id, r.clone())), + .filter_map(|item| { + let range_start = multi_buffer_snapshot.anchor_in_buffer(item.range.start)?; + let range_end = multi_buffer_snapshot.anchor_in_buffer(item.range.end)?; + let source_range_for_text_start = + multi_buffer_snapshot.anchor_in_buffer(item.source_range_for_text.start)?; + let source_range_for_text_end = + multi_buffer_snapshot.anchor_in_buffer(item.source_range_for_text.end)?; + Some(OutlineItem { + depth: item.depth, + range: range_start..range_end, + source_range_for_text: source_range_for_text_start..source_range_for_text_end, + text: item.text.clone(), + highlight_ranges: item.highlight_ranges.clone(), + name_ranges: item.name_ranges.clone(), + body_range: item.body_range.as_ref().and_then(|r| { + Some( + multi_buffer_snapshot.anchor_in_buffer(r.start)? + ..multi_buffer_snapshot.anchor_in_buffer(r.end)?, + ) + }), + annotation_range: item.annotation_range.as_ref().and_then(|r| { + Some( + multi_buffer_snapshot.anchor_in_buffer(r.start)? 
+ ..multi_buffer_snapshot.anchor_in_buffer(r.end)?, + ) + }), + }) }) .collect::>(); @@ -135,7 +130,7 @@ impl Editor { retain }); - Some((buffer_id, symbols)) + Some((buffer.remote_id(), symbols)) } /// Fetches document symbols from the LSP for buffers that have the setting @@ -155,9 +150,10 @@ impl Editor { }; let buffers_to_query = self - .visible_excerpts(true, cx) + .visible_buffers(cx) .into_iter() - .filter_map(|(_, (buffer, _, _))| { + .filter(|buffer| self.is_lsp_relevant(buffer.read(cx).file(), cx)) + .filter_map(|buffer| { let id = buffer.read(cx).remote_id(); if for_buffer.is_none_or(|target| target == id) && lsp_symbols_enabled(buffer.read(cx), cx) diff --git a/crates/editor/src/edit_prediction_tests.rs b/crates/editor/src/edit_prediction_tests.rs index 52939a9e5a8fd1a35a3a3c0bcd2a04b893bd6628..987801471e5602f256ce2dd65edd57873c878027 100644 --- a/crates/editor/src/edit_prediction_tests.rs +++ b/crates/editor/src/edit_prediction_tests.rs @@ -7,7 +7,7 @@ use gpui::{ use indoc::indoc; use language::EditPredictionsMode; use language::{Buffer, CodeLabel}; -use multi_buffer::{Anchor, ExcerptId, MultiBufferSnapshot, ToPoint}; +use multi_buffer::{Anchor, MultiBufferSnapshot, ToPoint}; use project::{Completion, CompletionResponse, CompletionSource}; use std::{ ops::Range, @@ -1081,6 +1081,44 @@ async fn test_cancel_clears_stale_edit_prediction_in_menu(cx: &mut gpui::TestApp }); } +#[gpui::test] +async fn test_discard_clears_delegate_completion(cx: &mut gpui::TestAppContext) { + init_test(cx, |_| {}); + load_default_keymap(cx); + + let mut cx = EditorTestContext::new(cx).await; + let provider = cx.new(|_| FakeEditPredictionDelegate::default()); + assign_editor_completion_provider(provider.clone(), &mut cx); + cx.set_state("let x = ˇ;"); + + propose_edits(&provider, vec![(8..8, "42")], &mut cx); + cx.update_editor(|editor, window, cx| editor.update_visible_edit_prediction(window, cx)); + + cx.update_editor(|editor, _window, _cx| { + 
assert!(editor.active_edit_prediction.is_some()); + }); + + // Dismiss the prediction — this must call discard() on the delegate, + // which should clear self.completion. + cx.simulate_keystroke("escape"); + cx.run_until_parked(); + + cx.update_editor(|editor, _window, _cx| { + assert!(editor.active_edit_prediction.is_none()); + }); + + // update_visible_edit_prediction must NOT bring the prediction back, + // because discard() cleared self.completion in the delegate. + cx.update_editor(|editor, window, cx| editor.update_visible_edit_prediction(window, cx)); + + cx.update_editor(|editor, _window, _cx| { + assert!( + editor.active_edit_prediction.is_none(), + "prediction must not resurface after discard()" + ); + }); +} + fn accept_completion(cx: &mut EditorTestContext) { cx.update_editor(|editor, window, cx| { editor.accept_edit_prediction(&crate::AcceptEditPrediction, window, cx) @@ -1242,15 +1280,14 @@ struct FakeCompletionMenuProvider; impl CompletionProvider for FakeCompletionMenuProvider { fn completions( &self, - _excerpt_id: ExcerptId, - _buffer: &Entity, + buffer: &Entity, _buffer_position: text::Anchor, _trigger: CompletionContext, _window: &mut Window, - _cx: &mut Context, + cx: &mut Context, ) -> Task>> { let completion = Completion { - replace_range: text::Anchor::MIN..text::Anchor::MAX, + replace_range: text::Anchor::min_max_range_for_buffer(buffer.read(cx).remote_id()), new_text: "fake_completion".to_string(), label: CodeLabel::plain("fake_completion".to_string(), None), documentation: None, @@ -1351,6 +1388,7 @@ impl EditPredictionDelegate for FakeEditPredictionDelegate { _reason: edit_prediction_types::EditPredictionDiscardReason, _cx: &mut gpui::Context, ) { + self.completion.take(); } fn suggest<'a>( @@ -1427,6 +1465,7 @@ impl EditPredictionDelegate for FakeNonZedEditPredictionDelegate { _reason: edit_prediction_types::EditPredictionDiscardReason, _cx: &mut gpui::Context, ) { + self.completion.take(); } fn suggest<'a>( diff --git 
a/crates/editor/src/editor.rs b/crates/editor/src/editor.rs index 76ec95928dc729e12060e75f8ec7d61197624c5f..6550d79c9f73799d37ccf6433db38f2719636ee6 100644 --- a/crates/editor/src/editor.rs +++ b/crates/editor/src/editor.rs @@ -76,7 +76,7 @@ pub use linked_editing_ranges::LinkedEdits; pub use lsp::CompletionContext; pub use lsp_ext::lsp_tasks; pub use multi_buffer::{ - Anchor, AnchorRangeExt, BufferOffset, ExcerptId, ExcerptRange, MBTextSummary, MultiBuffer, + Anchor, AnchorRangeExt, BufferOffset, ExcerptRange, MBTextSummary, MultiBuffer, MultiBufferOffset, MultiBufferOffsetUtf16, MultiBufferSnapshot, PathKey, RowInfo, ToOffset, ToPoint, }; @@ -150,7 +150,8 @@ use markdown::Markdown; use mouse_context_menu::MouseContextMenu; use movement::TextLayoutDetails; use multi_buffer::{ - ExcerptInfo, ExpandExcerptDirection, MultiBufferDiffHunk, MultiBufferPoint, MultiBufferRow, + ExcerptBoundaryInfo, ExpandExcerptDirection, MultiBufferDiffHunk, MultiBufferPoint, + MultiBufferRow, }; use parking_lot::Mutex; use persistence::EditorDb; @@ -640,6 +641,7 @@ pub(crate) enum EditDisplayMode { enum EditPrediction { Edit { + // TODO could be a language::Anchor? edits: Vec<(Range, Arc)>, /// Predicted cursor position as (anchor, offset_from_anchor). 
/// The anchor is in multibuffer coordinates; after applying edits, @@ -887,7 +889,8 @@ pub trait Addon: 'static { fn render_buffer_header_controls( &self, - _: &ExcerptInfo, + _: &ExcerptBoundaryInfo, + _: &language::BufferSnapshot, _: &Window, _: &App, ) -> Option { @@ -1340,7 +1343,7 @@ pub struct Editor { suppress_selection_callback: bool, applicable_language_settings: HashMap, LanguageSettings>, accent_data: Option, - bracket_fetched_tree_sitter_chunks: HashMap>>, + bracket_fetched_tree_sitter_chunks: HashMap, HashSet>>, semantic_token_state: SemanticTokenState, pub(crate) refresh_matching_bracket_highlights_task: Task<()>, refresh_document_symbols_task: Shared>, @@ -1763,15 +1766,13 @@ impl ClipboardSelection { project.absolute_path(&project_path, cx) }); - let line_range = file_path.as_ref().and_then(|_| { - let (_, start_point, start_excerpt_id) = buffer.point_to_buffer_point(range.start)?; - let (_, end_point, end_excerpt_id) = buffer.point_to_buffer_point(range.end)?; - if start_excerpt_id == end_excerpt_id { - Some(start_point.row..=end_point.row) - } else { - None - } - }); + let line_range = if file_path.is_some() { + buffer + .range_to_buffer_range(range) + .map(|(_, buffer_range)| buffer_range.start.row..=buffer_range.end.row) + } else { + None + }; Self { len, @@ -1852,9 +1853,8 @@ pub enum JumpData { line_offset_from_top: u32, }, MultiBufferPoint { - excerpt_id: ExcerptId, + anchor: language::Anchor, position: Point, - anchor: text::Anchor, line_offset_from_top: u32, }, } @@ -1990,17 +1990,21 @@ impl Editor { if !self.mode.is_full() { return; } - let multi_buffer = display_snapshot.buffer_snapshot(); + let multi_buffer = display_snapshot.buffer_snapshot().clone(); let scroll_anchor = self .scroll_manager .native_anchor(display_snapshot, cx) .anchor; - let Some((excerpt_id, _, buffer)) = multi_buffer.as_singleton() else { + let Some(buffer_snapshot) = multi_buffer.as_singleton() else { return; }; - let buffer = buffer.clone(); - let 
buffer_visible_start = scroll_anchor.text_anchor.to_point(&buffer); + let buffer = buffer_snapshot.clone(); + let Some((buffer_visible_start, _)) = multi_buffer.anchor_to_buffer_anchor(scroll_anchor) + else { + return; + }; + let buffer_visible_start = buffer_visible_start.to_point(&buffer); let max_row = buffer.max_point().row; let start_row = buffer_visible_start.row.min(max_row); let end_row = (buffer_visible_start.row + 10).min(max_row); @@ -2014,22 +2018,24 @@ impl Editor { Some(syntax.as_ref()), ) .into_iter() - .map(|outline_item| OutlineItem { - depth: outline_item.depth, - range: Anchor::range_in_buffer(excerpt_id, outline_item.range), - source_range_for_text: Anchor::range_in_buffer( - excerpt_id, - outline_item.source_range_for_text, - ), - text: outline_item.text, - highlight_ranges: outline_item.highlight_ranges, - name_ranges: outline_item.name_ranges, - body_range: outline_item - .body_range - .map(|range| Anchor::range_in_buffer(excerpt_id, range)), - annotation_range: outline_item - .annotation_range - .map(|range| Anchor::range_in_buffer(excerpt_id, range)), + .filter_map(|outline_item| { + Some(OutlineItem { + depth: outline_item.depth, + range: multi_buffer + .buffer_anchor_range_to_anchor_range(outline_item.range)?, + source_range_for_text: multi_buffer.buffer_anchor_range_to_anchor_range( + outline_item.source_range_for_text, + )?, + text: outline_item.text, + highlight_ranges: outline_item.highlight_ranges, + name_ranges: outline_item.name_ranges, + body_range: outline_item.body_range.and_then(|range| { + multi_buffer.buffer_anchor_range_to_anchor_range(range) + }), + annotation_range: outline_item.annotation_range.and_then(|range| { + multi_buffer.buffer_anchor_range_to_anchor_range(range) + }), + }) }) .collect() }); @@ -3024,7 +3030,10 @@ impl Editor { fn edit_prediction_cursor_popover_prefers_preview( &self, completion: &EditPredictionState, + cx: &App, ) -> bool { + let multibuffer_snapshot = self.buffer.read(cx).snapshot(cx); + match 
&completion.completion { EditPrediction::Edit { edits, snapshot, .. @@ -3033,8 +3042,13 @@ impl Editor { let mut end_row: Option = None; for (range, text) in edits { - let edit_start_row = range.start.text_anchor.to_point(snapshot).row; - let old_end_row = range.end.text_anchor.to_point(snapshot).row; + let Some((_, range)) = + multibuffer_snapshot.anchor_range_to_buffer_anchor_range(range.clone()) + else { + continue; + }; + let edit_start_row = range.start.to_point(snapshot).row; + let old_end_row = range.end.to_point(snapshot).row; let inserted_newline_count = text .as_ref() .chars() @@ -3083,7 +3097,7 @@ impl Editor { .active_edit_prediction .as_ref() .filter(|completion| { - self.edit_prediction_cursor_popover_prefers_preview(completion) + self.edit_prediction_cursor_popover_prefers_preview(completion, cx) }) .map_or(EditPredictionKeybindAction::Accept, |_| { EditPredictionKeybindAction::Preview @@ -3320,13 +3334,12 @@ impl Editor { self.buffer.read(cx).read(cx).file_at(point).cloned() } - pub fn active_excerpt( - &self, - cx: &App, - ) -> Option<(ExcerptId, Entity, Range)> { - self.buffer - .read(cx) - .excerpt_containing(self.selections.newest_anchor().head(), cx) + pub fn active_buffer(&self, cx: &App) -> Option> { + let multibuffer = self.buffer.read(cx); + let snapshot = multibuffer.snapshot(cx); + let (anchor, _) = + snapshot.anchor_to_buffer_anchor(self.selections.newest_anchor().head())?; + multibuffer.buffer(anchor.buffer_id) } pub fn mode(&self) -> &EditorMode { @@ -3695,8 +3708,8 @@ impl Editor { } if local { - if let Some(buffer_id) = new_cursor_position.text_anchor.buffer_id { - self.register_buffer(buffer_id, cx); + if let Some((anchor, _)) = buffer.anchor_to_buffer_anchor(new_cursor_position) { + self.register_buffer(anchor.buffer_id, cx); } let mut context_menu = self.context_menu.borrow_mut(); @@ -3778,12 +3791,13 @@ impl Editor { if selections.len() == 1 { cx.emit(SearchEvent::ActiveMatchChanged) } - if local && let Some((_, _, 
buffer_snapshot)) = buffer.as_singleton() { + if local && let Some(buffer_snapshot) = buffer.as_singleton() { let inmemory_selections = selections .iter() .map(|s| { - text::ToPoint::to_point(&s.range().start.text_anchor, buffer_snapshot) - ..text::ToPoint::to_point(&s.range().end.text_anchor, buffer_snapshot) + let start = s.range().start.text_anchor_in(buffer_snapshot); + let end = s.range().end.text_anchor_in(buffer_snapshot); + (start..end).to_point(buffer_snapshot) }) .collect(); self.update_restoration_data(cx, |data| { @@ -3829,7 +3843,6 @@ impl Editor { fn folds_did_change(&mut self, cx: &mut Context) { use text::ToOffset as _; - use text::ToPoint as _; if self.mode.is_minimap() || WorkspaceSettings::get(None, cx).restore_on_startup @@ -3838,21 +3851,18 @@ impl Editor { return; } - if !self.buffer().read(cx).is_singleton() { - return; - } - let display_snapshot = self .display_map .update(cx, |display_map, cx| display_map.snapshot(cx)); - let Some((.., snapshot)) = display_snapshot.buffer_snapshot().as_singleton() else { + let Some(buffer_snapshot) = display_snapshot.buffer_snapshot().as_singleton() else { return; }; let inmemory_folds = display_snapshot .folds_in_range(MultiBufferOffset(0)..display_snapshot.buffer_snapshot().len()) .map(|fold| { - fold.range.start.text_anchor.to_point(&snapshot) - ..fold.range.end.text_anchor.to_point(&snapshot) + let start = fold.range.start.text_anchor_in(buffer_snapshot); + let end = fold.range.end.text_anchor_in(buffer_snapshot); + (start..end).to_point(buffer_snapshot) }) .collect(); self.update_restoration_data(cx, |data| { @@ -3876,8 +3886,16 @@ impl Editor { let db_folds = display_snapshot .folds_in_range(MultiBufferOffset(0)..display_snapshot.buffer_snapshot().len()) .map(|fold| { - let start = fold.range.start.text_anchor.to_offset(&snapshot); - let end = fold.range.end.text_anchor.to_offset(&snapshot); + let start = fold + .range + .start + .text_anchor_in(buffer_snapshot) + .to_offset(buffer_snapshot); + let 
end = fold + .range + .end + .text_anchor_in(buffer_snapshot) + .to_offset(buffer_snapshot); // Extract fingerprints - content at fold boundaries for validation on restore // Both fingerprints must be INSIDE the fold to avoid capturing surrounding @@ -3886,12 +3904,14 @@ impl Editor { // end_fp: last min(32, fold_len) bytes of fold content // Clip to character boundaries to handle multibyte UTF-8 characters. let fold_len = end - start; - let start_fp_end = snapshot + let start_fp_end = buffer_snapshot .clip_offset(start + std::cmp::min(FINGERPRINT_LEN, fold_len), Bias::Left); - let start_fp: String = snapshot.text_for_range(start..start_fp_end).collect(); - let end_fp_start = snapshot + let start_fp: String = buffer_snapshot + .text_for_range(start..start_fp_end) + .collect(); + let end_fp_start = buffer_snapshot .clip_offset(end.saturating_sub(FINGERPRINT_LEN).max(start), Bias::Right); - let end_fp: String = snapshot.text_for_range(end_fp_start..end).collect(); + let end_fp: String = buffer_snapshot.text_for_range(end_fp_start..end).collect(); (start, end, start_fp, end_fp) }) @@ -4654,30 +4674,31 @@ impl Editor { fn linked_editing_ranges_for( &self, - selection: Range, + query_range: Range, cx: &App, ) -> Option, Vec>>> { + use text::ToOffset as TO; + if self.linked_edit_ranges.is_empty() { return None; } - let ((base_range, linked_ranges), buffer_snapshot, buffer) = - selection.end.buffer_id.and_then(|end_buffer_id| { - if selection.start.buffer_id != Some(end_buffer_id) { - return None; - } - let buffer = self.buffer.read(cx).buffer(end_buffer_id)?; - let snapshot = buffer.read(cx).snapshot(); - self.linked_edit_ranges - .get(end_buffer_id, selection.start..selection.end, &snapshot) - .map(|ranges| (ranges, snapshot, buffer)) - })?; - use text::ToOffset as TO; + if query_range.start.buffer_id != query_range.end.buffer_id { + return None; + }; + let multibuffer_snapshot = self.buffer.read(cx).snapshot(cx); + let buffer = 
self.buffer.read(cx).buffer(query_range.end.buffer_id)?; + let buffer_snapshot = buffer.read(cx).snapshot(); + let (base_range, linked_ranges) = self.linked_edit_ranges.get( + buffer_snapshot.remote_id(), + query_range.clone(), + &buffer_snapshot, + )?; // find offset from the start of current range to current cursor position let start_byte_offset = TO::to_offset(&base_range.start, &buffer_snapshot); - let start_offset = TO::to_offset(&selection.start, &buffer_snapshot); + let start_offset = TO::to_offset(&query_range.start, &buffer_snapshot); let start_difference = start_offset - start_byte_offset; - let end_offset = TO::to_offset(&selection.end, &buffer_snapshot); + let end_offset = TO::to_offset(&query_range.end, &buffer_snapshot); let end_difference = end_offset - start_byte_offset; // Current range has associated linked ranges. @@ -4690,13 +4711,22 @@ impl Editor { continue; } if self.selections.disjoint_anchor_ranges().any(|s| { - if s.start.text_anchor.buffer_id != selection.start.buffer_id - || s.end.text_anchor.buffer_id != selection.end.buffer_id + let Some((selection_start, _)) = + multibuffer_snapshot.anchor_to_buffer_anchor(s.start) + else { + return false; + }; + let Some((selection_end, _)) = multibuffer_snapshot.anchor_to_buffer_anchor(s.end) + else { + return false; + }; + if selection_start.buffer_id != query_range.start.buffer_id + || selection_end.buffer_id != query_range.end.buffer_id { return false; } - TO::to_offset(&s.start.text_anchor, &buffer_snapshot) <= end_offset - && TO::to_offset(&s.end.text_anchor, &buffer_snapshot) >= start_offset + TO::to_offset(&selection_start, &buffer_snapshot) <= end_offset + && TO::to_offset(&selection_end, &buffer_snapshot) >= start_offset }) { continue; } @@ -5015,21 +5045,26 @@ impl Editor { if !self.linked_edit_ranges.is_empty() { let start_anchor = snapshot.anchor_before(selection.start); + let classifier = snapshot + .char_classifier_at(start_anchor) + .scope_context(Some(CharScopeContext::LinkedEdit)); 
- let is_word_char = text.chars().next().is_none_or(|char| { - let classifier = snapshot - .char_classifier_at(start_anchor.to_offset(&snapshot)) - .scope_context(Some(CharScopeContext::LinkedEdit)); - classifier.is_word(char) - }); - let is_dot = text.as_ref() == "."; - let should_apply_linked_edit = is_word_char || is_dot; + if let Some((_, anchor_range)) = + snapshot.anchor_range_to_buffer_anchor_range(start_anchor..anchor) + { + let is_word_char = text + .chars() + .next() + .is_none_or(|char| classifier.is_word(char)); - if should_apply_linked_edit { - let anchor_range = start_anchor.text_anchor..anchor.text_anchor; - linked_edits.push(&self, anchor_range, text.clone(), cx); - } else { - clear_linked_edit_ranges = true; + let is_dot = text.as_ref() == "."; + let should_apply_linked_edit = is_word_char || is_dot; + + if should_apply_linked_edit { + linked_edits.push(&self, anchor_range, text.clone(), cx); + } else { + clear_linked_edit_ranges = true; + } } } @@ -5522,7 +5557,7 @@ impl Editor { let row = cursor.row; let point = Point::new(row, 0); - let Some((buffer_handle, buffer_point, _)) = + let Some((buffer_handle, buffer_point)) = self.buffer.read(cx).point_to_buffer_point(point, cx) else { continue; @@ -5662,12 +5697,16 @@ impl Editor { /// Collects linked edits for the current selections, pairing each linked /// range with `text`. 
pub fn linked_edits_for_selections(&self, text: Arc, cx: &App) -> LinkedEdits { + let multibuffer_snapshot = self.buffer().read(cx).snapshot(cx); let mut linked_edits = LinkedEdits::new(); if !self.linked_edit_ranges.is_empty() { for selection in self.selections.disjoint_anchors() { - let start = selection.start.text_anchor; - let end = selection.end.text_anchor; - linked_edits.push(self, start..end, text.clone(), cx); + let Some((_, range)) = + multibuffer_snapshot.anchor_range_to_buffer_anchor_range(selection.range()) + else { + continue; + }; + linked_edits.push(self, range, text.clone(), cx); } } linked_edits @@ -5898,53 +5937,54 @@ impl Editor { } } - pub fn visible_excerpts( - &self, - lsp_related_only: bool, - cx: &mut Context, - ) -> HashMap, clock::Global, Range)> { - let project = self.project().cloned(); - let display_snapshot = self.display_map.update(cx, |map, cx| map.snapshot(cx)); + pub fn is_lsp_relevant(&self, file: Option<&Arc>, cx: &App) -> bool { + let Some(project) = self.project() else { + return false; + }; + let Some(buffer_file) = project::File::from_dyn(file) else { + return false; + }; + let Some(entry_id) = buffer_file.project_entry_id() else { + return false; + }; + let project = project.read(cx); + let Some(buffer_worktree) = project.worktree_for_id(buffer_file.worktree_id(cx), cx) else { + return false; + }; + let Some(worktree_entry) = buffer_worktree.read(cx).entry_for_id(entry_id) else { + return false; + }; + !worktree_entry.is_ignored + } + + pub fn visible_buffers(&self, cx: &mut Context) -> Vec> { + let display_snapshot = self.display_snapshot(cx); + let visible_range = self.multi_buffer_visible_range(&display_snapshot, cx); let multi_buffer = self.buffer().read(cx); - let multi_buffer_snapshot = multi_buffer.snapshot(cx); - multi_buffer_snapshot - .range_to_buffer_ranges( - self.multi_buffer_visible_range(&display_snapshot, cx) - .to_inclusive(), - ) + display_snapshot + .buffer_snapshot() + 
.range_to_buffer_ranges(visible_range) .into_iter() .filter(|(_, excerpt_visible_range, _)| !excerpt_visible_range.is_empty()) - .filter_map(|(buffer, excerpt_visible_range, excerpt_id)| { - if !lsp_related_only { - return Some(( - excerpt_id, - ( - multi_buffer.buffer(buffer.remote_id()).unwrap(), - buffer.version().clone(), - excerpt_visible_range.start.0..excerpt_visible_range.end.0, - ), - )); - } + .filter_map(|(buffer_snapshot, _, _)| multi_buffer.buffer(buffer_snapshot.remote_id())) + .collect() + } - let project = project.as_ref()?.read(cx); - let buffer_file = project::File::from_dyn(buffer.file())?; - let buffer_worktree = project.worktree_for_id(buffer_file.worktree_id(cx), cx)?; - let worktree_entry = buffer_worktree - .read(cx) - .entry_for_id(buffer_file.project_entry_id()?)?; - if worktree_entry.is_ignored { - None - } else { - Some(( - excerpt_id, - ( - multi_buffer.buffer(buffer.remote_id()).unwrap(), - buffer.version().clone(), - excerpt_visible_range.start.0..excerpt_visible_range.end.0, - ), - )) - } - }) + pub fn visible_buffer_ranges( + &self, + cx: &mut Context, + ) -> Vec<( + BufferSnapshot, + Range, + ExcerptRange, + )> { + let display_snapshot = self.display_snapshot(cx); + let visible_range = self.multi_buffer_visible_range(&display_snapshot, cx); + display_snapshot + .buffer_snapshot() + .range_to_buffer_ranges(visible_range) + .into_iter() + .filter(|(_, excerpt_visible_range, _)| !excerpt_visible_range.is_empty()) .collect() } @@ -6069,17 +6109,19 @@ impl Editor { .newest_anchor() .start .bias_right(&multibuffer_snapshot); - if position.diff_base_anchor.is_some() { + + if position.diff_base_anchor().is_some() { return; } - let buffer_position = multibuffer_snapshot.anchor_before(position); - let Some(buffer) = buffer_position - .text_anchor - .buffer_id - .and_then(|buffer_id| self.buffer.read(cx).buffer(buffer_id)) + let multibuffer_position = multibuffer_snapshot.anchor_before(position); + let Some((buffer_position, _)) = + 
multibuffer_snapshot.anchor_to_buffer_anchor(multibuffer_position) else { return; }; + let Some(buffer) = self.buffer.read(cx).buffer(buffer_position.buffer_id) else { + return; + }; let buffer_snapshot = buffer.read(cx).snapshot(); let menu_is_open = matches!( @@ -6088,9 +6130,9 @@ impl Editor { ); let language = buffer_snapshot - .language_at(buffer_position.text_anchor) + .language_at(buffer_position) .map(|language| language.name()); - let language_settings = multibuffer_snapshot.language_settings_at(buffer_position, cx); + let language_settings = multibuffer_snapshot.language_settings_at(multibuffer_position, cx); let completion_settings = language_settings.completions.clone(); let show_completions_on_input = self @@ -6101,7 +6143,7 @@ impl Editor { } let query: Option> = - Self::completion_query(&multibuffer_snapshot, buffer_position) + Self::completion_query(&multibuffer_snapshot, multibuffer_position) .map(|query| query.into()); drop(multibuffer_snapshot); @@ -6143,7 +6185,7 @@ impl Editor { if filter_completions { menu.filter( query.clone().unwrap_or_default(), - buffer_position.text_anchor, + buffer_position, &buffer, provider.clone(), window, @@ -6177,12 +6219,6 @@ impl Editor { } }; - let Anchor { - excerpt_id: buffer_excerpt_id, - text_anchor: buffer_position, - .. 
- } = buffer_position; - let (word_replace_range, word_to_exclude) = if let (word_range, Some(CharKind::Word)) = buffer_snapshot.surrounding_word(buffer_position, None) { @@ -6225,7 +6261,7 @@ impl Editor { trigger.as_ref().is_none_or(|trigger| { provider.is_completion_trigger( &buffer, - position.text_anchor, + buffer_position, trigger, trigger_in_words, cx, @@ -6246,14 +6282,7 @@ impl Editor { trigger_character, }; - provider.completions( - buffer_excerpt_id, - &buffer, - buffer_position, - completion_context, - window, - cx, - ) + provider.completions(&buffer, buffer_position, completion_context, window, cx) } else { Task::ready(Ok(Vec::new())) }; @@ -6593,42 +6622,42 @@ impl Editor { cx.stop_propagation(); let buffer_handle = completions_menu.buffer.clone(); + let multibuffer_snapshot = self.buffer.read(cx).snapshot(cx); + let (initial_position, _) = + multibuffer_snapshot.anchor_to_buffer_anchor(completions_menu.initial_position)?; let CompletionEdit { new_text, snippet, replace_range, - } = process_completion_for_edit( - &completion, - intent, - &buffer_handle, - &completions_menu.initial_position.text_anchor, - cx, - ); + } = process_completion_for_edit(&completion, intent, &buffer_handle, &initial_position, cx); - let buffer = buffer_handle.read(cx); - let snapshot = self.buffer.read(cx).snapshot(cx); - let newest_anchor = self.selections.newest_anchor(); - let replace_range_multibuffer = { - let mut excerpt = snapshot.excerpt_containing(newest_anchor.range()).unwrap(); - excerpt.map_range_from_buffer(replace_range.clone()) + let buffer = buffer_handle.read(cx).snapshot(); + let newest_selection = self.selections.newest_anchor(); + + let Some(replace_range_multibuffer) = + multibuffer_snapshot.buffer_anchor_range_to_anchor_range(replace_range.clone()) + else { + return None; }; - if snapshot.buffer_id_for_anchor(newest_anchor.head()) != Some(buffer.remote_id()) { + + let Some((buffer_snapshot, newest_range_buffer)) = + 
multibuffer_snapshot.anchor_range_to_buffer_anchor_range(newest_selection.range()) + else { return None; - } + }; let old_text = buffer .text_for_range(replace_range.clone()) .collect::(); - let lookbehind = newest_anchor + let lookbehind = newest_range_buffer .start - .text_anchor - .to_offset(buffer) - .saturating_sub(replace_range.start.0); + .to_offset(buffer_snapshot) + .saturating_sub(replace_range.start.to_offset(&buffer_snapshot)); let lookahead = replace_range .end - .0 - .saturating_sub(newest_anchor.end.text_anchor.to_offset(buffer)); + .to_offset(&buffer_snapshot) + .saturating_sub(newest_range_buffer.end.to_offset(&buffer)); let prefix = &old_text[..old_text.len().saturating_sub(lookahead)]; let suffix = &old_text[lookbehind.min(old_text.len())..]; @@ -6641,34 +6670,40 @@ impl Editor { let text: Arc = new_text.clone().into(); for selection in &selections { - let range = if selection.id == newest_anchor.id { + let range = if selection.id == newest_selection.id { replace_range_multibuffer.clone() } else { let mut range = selection.range(); // if prefix is present, don't duplicate it - if snapshot.contains_str_at(range.start.saturating_sub_usize(lookbehind), prefix) { + if multibuffer_snapshot + .contains_str_at(range.start.saturating_sub_usize(lookbehind), prefix) + { range.start = range.start.saturating_sub_usize(lookbehind); // if suffix is also present, mimic the newest cursor and replace it - if selection.id != newest_anchor.id - && snapshot.contains_str_at(range.end, suffix) + if selection.id != newest_selection.id + && multibuffer_snapshot.contains_str_at(range.end, suffix) { range.end += lookahead; } } - range + range.to_anchors(&multibuffer_snapshot) }; ranges.push(range.clone()); - let start_anchor = snapshot.anchor_before(range.start); - let end_anchor = snapshot.anchor_after(range.end); - let anchor_range = start_anchor.text_anchor..end_anchor.text_anchor; - all_commit_ranges.push(anchor_range.clone()); + let start_anchor = 
multibuffer_snapshot.anchor_before(range.start); + let end_anchor = multibuffer_snapshot.anchor_after(range.end); - if !self.linked_edit_ranges.is_empty() { - linked_edits.push(&self, anchor_range, text.clone(), cx); + if let Some((buffer_snapshot_2, anchor_range)) = + multibuffer_snapshot.anchor_range_to_buffer_anchor_range(start_anchor..end_anchor) + && buffer_snapshot_2.remote_id() == buffer_snapshot.remote_id() + { + all_commit_ranges.push(anchor_range.clone()); + if !self.linked_edit_ranges.is_empty() { + linked_edits.push(&self, anchor_range, text.clone(), cx); + } } } @@ -6687,8 +6722,12 @@ impl Editor { let tx_id = self.transact(window, cx, |editor, window, cx| { if let Some(mut snippet) = snippet { snippet.text = new_text.to_string(); + let offset_ranges = ranges + .iter() + .map(|range| range.to_offset(&multibuffer_snapshot)) + .collect::>(); editor - .insert_snippet(&ranges, snippet, window, cx) + .insert_snippet(&offset_ranges, snippet, window, cx) .log_err(); } else { editor.buffer.update(cx, |multi_buffer, cx| { @@ -6703,7 +6742,10 @@ impl Editor { linked_edits.apply(cx); editor.refresh_edit_prediction(true, false, window, cx); }); - self.invalidate_autoclose_regions(&self.selections.disjoint_anchors_arc(), &snapshot); + self.invalidate_autoclose_regions( + &self.selections.disjoint_anchors_arc(), + &multibuffer_snapshot, + ); let show_new_completions_on_confirm = completion .confirm @@ -6739,7 +6781,7 @@ impl Editor { if available_commands.contains(&lsp_command.command) { Some(CodeAction { server_id: *server_id, - range: language::Anchor::MIN..language::Anchor::MIN, + range: language::Anchor::min_min_range_for_buffer(buffer.remote_id()), lsp_action: LspAction::Command(lsp_command.clone()), resolved: false, }) @@ -7069,13 +7111,9 @@ impl Editor { Some(Task::ready(Ok(()))) }) } - CodeActionsItem::CodeAction { - excerpt_id, - action, - provider, - } => { + CodeActionsItem::CodeAction { action, provider } => { let apply_code_action = - 
provider.apply_code_action(buffer, action, excerpt_id, true, window, cx); + provider.apply_code_action(buffer, action, true, window, cx); let workspace = workspace.downgrade(); Some(cx.spawn_in(window, async move |editor, cx| { let project_transaction = apply_code_action.await?; @@ -7175,17 +7213,19 @@ impl Editor { // avoid opening a new editor to display them. if let [(buffer, transaction)] = &*entries { - let excerpt = editor.update(cx, |editor, cx| { - editor - .buffer() - .read(cx) - .excerpt_containing(editor.selections.newest_anchor().head(), cx) + let cursor_excerpt = editor.update(cx, |editor, cx| { + let snapshot = editor.buffer().read(cx).snapshot(cx); + let head = editor.selections.newest_anchor().head(); + let (buffer_snapshot, excerpt_range) = snapshot.excerpt_containing(head..head)?; + if buffer_snapshot.remote_id() != buffer.read(cx).remote_id() { + return None; + } + Some(excerpt_range) })?; - if let Some((_, excerpted_buffer, excerpt_range)) = excerpt - && excerpted_buffer == *buffer - { + + if let Some(excerpt_range) = cursor_excerpt { let all_edits_within_excerpt = buffer.read_with(cx, |buffer, _| { - let excerpt_range = excerpt_range.to_offset(buffer); + let excerpt_range = excerpt_range.context.to_offset(buffer); buffer .edited_ranges_for_transaction::(transaction) .all(|range| { @@ -7207,15 +7247,21 @@ impl Editor { .read(cx) .edited_ranges_for_transaction::(transaction) .collect::>(); - let (ranges, _) = multibuffer.set_excerpts_for_path( + multibuffer.set_excerpts_for_path( PathKey::for_buffer(buffer_handle, cx), buffer_handle.clone(), - edited_ranges, + edited_ranges.clone(), multibuffer_context_lines(cx), cx, ); - - ranges_to_highlight.extend(ranges); + let snapshot = multibuffer.snapshot(cx); + let buffer_snapshot = buffer_handle.read(cx).snapshot(); + ranges_to_highlight.extend(edited_ranges.into_iter().filter_map(|range| { + let text_range = buffer_snapshot.anchor_range_inside(range); + let start = 
snapshot.anchor_in_buffer(text_range.start)?; + let end = snapshot.anchor_in_buffer(text_range.end)?; + Some(start..end) + })); } multibuffer.push_transaction(entries.iter().map(|(b, t)| (b, t)), cx); multibuffer @@ -7339,10 +7385,10 @@ impl Editor { .timer(CODE_ACTIONS_DEBOUNCE_TIMEOUT) .await; - let (start_buffer, start, _, end, newest_selection) = this + let (start_buffer, start, _, end, _newest_selection) = this .update(cx, |this, cx| { let newest_selection = this.selections.newest_anchor().clone(); - if newest_selection.head().diff_base_anchor.is_some() { + if newest_selection.head().diff_base_anchor().is_some() { return None; } let display_snapshot = this.display_snapshot(cx); @@ -7378,7 +7424,6 @@ impl Editor { if let Some(provider_actions) = provider_actions.log_err() { actions.extend(provider_actions.into_iter().map(|action| { AvailableCodeAction { - excerpt_id: newest_selection.start.excerpt_id, action, provider: provider.clone(), } @@ -7426,8 +7471,7 @@ impl Editor { .selections .newest::(&snapshot.display_snapshot) .head(); - let Some((buffer, point, _)) = snapshot.buffer_snapshot().point_to_buffer_point(cursor) - else { + let Some((buffer, point)) = snapshot.buffer_snapshot().point_to_buffer_point(cursor) else { return; }; @@ -7612,27 +7656,13 @@ impl Editor { return; } - let cursor_buffer_snapshot = cursor_buffer.read(cx); let mut write_ranges = Vec::new(); let mut read_ranges = Vec::new(); + let multibuffer_snapshot = buffer.snapshot(cx); for highlight in highlights { - let buffer_id = cursor_buffer.read(cx).remote_id(); - for (excerpt_id, _, excerpt_range) in - buffer.excerpts_for_buffer(buffer_id, cx) + for range in + multibuffer_snapshot.buffer_range_to_excerpt_ranges(highlight.range) { - let start = highlight - .range - .start - .max(&excerpt_range.context.start, cursor_buffer_snapshot); - let end = highlight - .range - .end - .min(&excerpt_range.context.end, cursor_buffer_snapshot); - if start.cmp(&end, cursor_buffer_snapshot).is_ge() { - 
continue; - } - - let range = Anchor::range_in_buffer(excerpt_id, *start..*end); if highlight.kind == lsp::DocumentHighlightKind::WRITE { write_ranges.push(range); } else { @@ -7713,7 +7743,7 @@ impl Editor { let match_task = cx.background_spawn(async move { let buffer_ranges = multi_buffer_snapshot .range_to_buffer_ranges( - multi_buffer_range_to_query.start..=multi_buffer_range_to_query.end, + multi_buffer_range_to_query.start..multi_buffer_range_to_query.end, ) .into_iter() .filter(|(_, excerpt_visible_range, _)| !excerpt_visible_range.is_empty()); @@ -7731,11 +7761,11 @@ impl Editor { return Vec::default(); }; let query_range = query_range.to_anchors(&multi_buffer_snapshot); - for (buffer_snapshot, search_range, excerpt_id) in buffer_ranges { + for (buffer_snapshot, search_range, _) in buffer_ranges { match_ranges.extend( regex .search( - buffer_snapshot, + &buffer_snapshot, Some(search_range.start.0..search_range.end.0), ) .await @@ -7745,9 +7775,14 @@ impl Editor { .anchor_after(search_range.start + match_range.start); let match_end = buffer_snapshot .anchor_before(search_range.start + match_range.end); - let match_anchor_range = - Anchor::range_in_buffer(excerpt_id, match_start..match_end); - (match_anchor_range != query_range).then_some(match_anchor_range) + { + let range = multi_buffer_snapshot + .anchor_in_buffer(match_start)? 
+ ..multi_buffer_snapshot.anchor_in_buffer(match_end)?; + Some(range).filter(|match_anchor_range| { + match_anchor_range != &query_range + }) + } }), ); } @@ -8434,13 +8469,15 @@ impl Editor { return; }; - let Some((_, buffer, _)) = self - .buffer - .read(cx) - .excerpt_containing(self.selections.newest_anchor().head(), cx) + let buffer_snapshot = self.buffer.read(cx).snapshot(cx); + let Some((position, _)) = + buffer_snapshot.anchor_to_buffer_anchor(self.selections.newest_anchor().head()) else { return; }; + let Some(buffer) = self.buffer.read(cx).buffer(position.buffer_id) else { + return; + }; let extension = buffer .read(cx) @@ -8687,17 +8724,16 @@ impl Editor { } let selection = self.selections.newest_anchor(); - let cursor = selection.head(); let multibuffer = self.buffer.read(cx).snapshot(cx); + let cursor = selection.head(); + let (cursor_text_anchor, _) = multibuffer.anchor_to_buffer_anchor(cursor)?; + let buffer = self.buffer.read(cx).buffer(cursor_text_anchor.buffer_id)?; // Check project-level disable_ai setting for the current buffer - if let Some((buffer, _)) = self.buffer.read(cx).text_anchor_for_position(cursor, cx) { - if DisableAiSettings::is_ai_disabled_for_buffer(Some(&buffer), cx) { - return None; - } + if DisableAiSettings::is_ai_disabled_for_buffer(Some(&buffer), cx) { + return None; } let offset_selection = selection.map(|endpoint| endpoint.to_offset(&multibuffer)); - let excerpt_id = cursor.excerpt_id; let show_in_menu = self.show_edit_predictions_in_menu(); let completions_menu_has_precedence = !show_in_menu @@ -8728,11 +8764,8 @@ impl Editor { return None; }; - let (buffer, cursor_buffer_position) = - self.buffer.read(cx).text_anchor_for_position(cursor, cx)?; - self.edit_prediction_settings = - self.edit_prediction_settings_at_position(&buffer, cursor_buffer_position, cx); + self.edit_prediction_settings_at_position(&buffer, cursor_text_anchor, cx); self.in_leading_whitespace = multibuffer.is_line_whitespace_upto(cursor); @@ -8755,7 
+8788,7 @@ impl Editor { } } - let edit_prediction = provider.suggest(&buffer, cursor_buffer_position, cx)?; + let edit_prediction = provider.suggest(&buffer, cursor_text_anchor, cx)?; let (completion_id, edits, predicted_cursor_position, edit_preview) = match edit_prediction { @@ -8789,7 +8822,7 @@ impl Editor { .into_iter() .flat_map(|(range, new_text)| { Some(( - multibuffer.anchor_range_in_excerpt(excerpt_id, range)?, + multibuffer.buffer_anchor_range_to_anchor_range(range)?, new_text, )) }) @@ -8799,7 +8832,7 @@ impl Editor { } let cursor_position = predicted_cursor_position.and_then(|predicted| { - let anchor = multibuffer.anchor_in_excerpt(excerpt_id, predicted.anchor)?; + let anchor = multibuffer.anchor_in_excerpt(predicted.anchor)?; Some((anchor, predicted.offset)) }); @@ -8813,7 +8846,9 @@ impl Editor { let cursor_row = cursor.to_point(&multibuffer).row; - let snapshot = multibuffer.buffer_for_excerpt(excerpt_id).cloned()?; + let snapshot = multibuffer + .buffer_for_id(cursor_text_anchor.buffer_id) + .cloned()?; let mut inlay_ids = Vec::new(); let invalidation_row_range; @@ -8960,20 +8995,14 @@ impl Editor { let snapshot = self.snapshot(window, cx); let multi_buffer_snapshot = snapshot.buffer_snapshot(); - let Some(project) = self.project() else { - return breakpoint_display_points; - }; let range = snapshot.display_point_to_point(DisplayPoint::new(range.start, 0), Bias::Left) ..snapshot.display_point_to_point(DisplayPoint::new(range.end, 0), Bias::Right); - for (buffer_snapshot, range, excerpt_id) in - multi_buffer_snapshot.range_to_buffer_ranges(range.start..=range.end) + for (buffer_snapshot, range, _) in + multi_buffer_snapshot.range_to_buffer_ranges(range.start..range.end) { - let Some(buffer) = project - .read(cx) - .buffer_for_id(buffer_snapshot.remote_id(), cx) - else { + let Some(buffer) = self.buffer().read(cx).buffer(buffer_snapshot.remote_id()) else { continue; }; let breakpoints = breakpoint_store.read(cx).breakpoints( @@ -8982,11 +9011,15 @@ 
impl Editor { buffer_snapshot.anchor_before(range.start) ..buffer_snapshot.anchor_after(range.end), ), - buffer_snapshot, + &buffer_snapshot, cx, ); for (breakpoint, state) in breakpoints { - let multi_buffer_anchor = Anchor::in_buffer(excerpt_id, breakpoint.position); + let Some(multi_buffer_anchor) = + multi_buffer_snapshot.anchor_in_excerpt(breakpoint.position) + else { + continue; + }; let position = multi_buffer_anchor .to_point(&multi_buffer_snapshot) .to_display_point(&snapshot); @@ -9764,7 +9797,14 @@ impl Editor { } let highlighted_edits = if let Some(edit_preview) = edit_preview.as_ref() { - crate::edit_prediction_edit_text(snapshot, edits, edit_preview, false, cx) + crate::edit_prediction_edit_text( + snapshot, + edits, + edit_preview, + false, + editor_snapshot.buffer_snapshot(), + cx, + ) } else { // Fallback for providers without edit_preview crate::edit_prediction_fallback_text(edits, cx) @@ -10204,7 +10244,8 @@ impl Editor { .child(div().px_1p5().child(match &prediction.completion { EditPrediction::MoveWithin { target, snapshot } => { use text::ToPoint as _; - if target.text_anchor.to_point(snapshot).row > cursor_point.row + if target.text_anchor_in(&snapshot).to_point(snapshot).row + > cursor_point.row { Icon::new(icons.down) } else { @@ -10418,19 +10459,18 @@ impl Editor { if !supports_jump { return None; } + let (target, _) = self.display_snapshot(cx).anchor_to_buffer_anchor(*target)?; Some( h_flex() .px_2() .gap_2() .flex_1() - .child( - if target.text_anchor.to_point(snapshot).row > cursor_point.row { - Icon::new(icons.down) - } else { - Icon::new(icons.up) - }, - ) + .child(if target.to_point(snapshot).row > cursor_point.row { + Icon::new(icons.down) + } else { + Icon::new(icons.up) + }) .child(Label::new("Jump to Edit")), ) } @@ -10454,12 +10494,24 @@ impl Editor { snapshot, .. 
} => { - let first_edit_row = edits.first()?.0.start.text_anchor.to_point(snapshot).row; + let first_edit_row = self + .display_snapshot(cx) + .anchor_to_buffer_anchor(edits.first()?.0.start)? + .0 + .to_point(snapshot) + .row; let (highlighted_edits, has_more_lines) = if let Some(edit_preview) = edit_preview.as_ref() { - crate::edit_prediction_edit_text(snapshot, edits, edit_preview, true, cx) - .first_line_preview() + crate::edit_prediction_edit_text( + snapshot, + edits, + edit_preview, + true, + &self.display_snapshot(cx), + cx, + ) + .first_line_preview() } else { crate::edit_prediction_fallback_text(edits, cx).first_line_preview() }; @@ -10554,21 +10606,15 @@ impl Editor { selection: Range, cx: &mut Context, ) { - let Some((_, buffer, _)) = self - .buffer() - .read(cx) - .excerpt_containing(selection.start, cx) + let buffer_snapshot = self.buffer.read(cx).snapshot(cx); + let Some((buffer_snapshot, range)) = + buffer_snapshot.anchor_range_to_buffer_anchor_range(selection.clone()) else { return; }; - let Some((_, end_buffer, _)) = self.buffer().read(cx).excerpt_containing(selection.end, cx) - else { + let Some(buffer) = self.buffer.read(cx).buffer(buffer_snapshot.remote_id()) else { return; }; - if buffer != end_buffer { - log::error!("expected anchor range to have matching buffer IDs"); - return; - } let id = post_inc(&mut self.next_completion_id); let snippet_sort_order = EditorSettings::get_global(cx).snippet_sort_order; @@ -10579,7 +10625,8 @@ impl Editor { id, true, choices, - selection, + selection.start, + range, buffer, old_menu.map(|menu| menu.primary_scroll_handle()), snippet_sort_order, @@ -11694,10 +11741,9 @@ impl Editor { buffer_ids.extend(snapshot.buffer_ids_for_range(selection.range())) } - let buffer = self.buffer().read(cx); let ranges = buffer_ids .into_iter() - .flat_map(|buffer_id| buffer.excerpt_ranges_for_buffer(buffer_id, cx)) + .flat_map(|buffer_id| snapshot.range_for_buffer(buffer_id)) .collect::>(); 
self.restore_hunks_in_ranges(ranges, window, cx); @@ -11767,8 +11813,11 @@ impl Editor { let hunks = self.snapshot(window, cx).hunks_for_ranges(ranges); self.transact(window, cx, |editor, window, cx| { editor.restore_diff_hunks(hunks, cx); - editor.change_selections(SelectionEffects::no_scroll(), window, cx, |selections| { - selections.refresh() + let selections = editor + .selections + .all::(&editor.display_snapshot(cx)); + editor.change_selections(SelectionEffects::no_scroll(), window, cx, |s| { + s.select(selections); }); }); } @@ -11822,7 +11871,7 @@ impl Editor { window: &mut Window, cx: &mut Context, ) { - if let Some(working_directory) = self.active_excerpt(cx).and_then(|(_, buffer, _)| { + if let Some(working_directory) = self.active_buffer(cx).and_then(|buffer| { let project_path = buffer.read(cx).project_path(cx)?; let project = self.project()?.read(cx); let entry = project.entry_for_path(&project_path, cx)?; @@ -11934,22 +11983,19 @@ impl Editor { snapshot: &EditorSnapshot, cx: &mut Context, ) -> Option<(Anchor, Breakpoint)> { - let buffer = self - .buffer - .read(cx) - .buffer_for_anchor(breakpoint_position, cx)?; + let (breakpoint_position, _) = snapshot + .buffer_snapshot() + .anchor_to_buffer_anchor(breakpoint_position)?; + let buffer = self.buffer.read(cx).buffer(breakpoint_position.buffer_id)?; - let enclosing_excerpt = breakpoint_position.excerpt_id; let buffer_snapshot = buffer.read(cx).snapshot(); let row = buffer_snapshot - .summary_for_anchor::(&breakpoint_position.text_anchor) + .summary_for_anchor::(&breakpoint_position) .row; - let line_len = snapshot.buffer_snapshot().line_len(MultiBufferRow(row)); - let anchor_end = snapshot - .buffer_snapshot() - .anchor_after(Point::new(row, line_len)); + let line_len = buffer_snapshot.line_len(row); + let anchor_end = buffer_snapshot.anchor_after(Point::new(row, line_len)); self.breakpoint_store .as_ref()? 
@@ -11957,7 +12003,7 @@ impl Editor { breakpoint_store .breakpoints( &buffer, - Some(breakpoint_position.text_anchor..anchor_end.text_anchor), + Some(breakpoint_position..anchor_end), &buffer_snapshot, cx, ) @@ -11970,7 +12016,7 @@ impl Editor { if breakpoint_row == row { snapshot .buffer_snapshot() - .anchor_in_excerpt(enclosing_excerpt, bp.position) + .anchor_in_excerpt(bp.position) .map(|position| (position, bp.bp.clone())) } else { None @@ -12246,20 +12292,20 @@ impl Editor { let Some(breakpoint_store) = &self.breakpoint_store else { return; }; - - let Some(buffer) = self - .buffer - .read(cx) - .buffer_for_anchor(breakpoint_position, cx) + let buffer_snapshot = self.buffer.read(cx).snapshot(cx); + let Some((position, _)) = buffer_snapshot.anchor_to_buffer_anchor(breakpoint_position) else { return; }; + let Some(buffer) = self.buffer.read(cx).buffer(position.buffer_id) else { + return; + }; breakpoint_store.update(cx, |breakpoint_store, cx| { breakpoint_store.toggle_breakpoint( buffer, BreakpointWithPosition { - position: breakpoint_position.text_anchor, + position, bp: breakpoint, }, edit_action, @@ -15484,7 +15530,7 @@ impl Editor { } self.hide_mouse_cursor(HideMouseCursorOrigin::MovementAction, cx); self.change_selections(Default::default(), window, cx, |s| { - s.select_ranges(vec![Anchor::min()..Anchor::min()]); + s.select_ranges(vec![Anchor::Min..Anchor::Min]); }); } @@ -15601,7 +15647,7 @@ impl Editor { pub fn select_all(&mut self, _: &SelectAll, window: &mut Window, cx: &mut Context) { self.hide_mouse_cursor(HideMouseCursorOrigin::MovementAction, cx); self.change_selections(SelectionEffects::no_scroll(), window, cx, |s| { - s.select_ranges([Anchor::min()..Anchor::max()]); + s.select_ranges(vec![Anchor::Min..Anchor::Max]); }); } @@ -17026,10 +17072,7 @@ impl Editor { window: &mut Window, cx: &mut Context, ) { - let old_selections: Box<[_]> = self - .selections - .all::(&self.display_snapshot(cx)) - .into(); + let old_selections = 
self.selections.all_anchors(&self.display_snapshot(cx)); if old_selections.is_empty() { return; } @@ -17042,21 +17085,25 @@ impl Editor { let new_selections = old_selections .iter() .map(|selection| { - let old_range = selection.start..selection.end; - let old_range = - old_range.start.to_offset(&buffer)..old_range.end.to_offset(&buffer); - let excerpt = buffer.excerpt_containing(old_range.clone()); - - if let Some(mut excerpt) = excerpt - && let Some(node) = excerpt - .buffer() - .syntax_next_sibling(excerpt.map_range_to_buffer(old_range)) + selection.start.to_offset(&buffer)..selection.end.to_offset(&buffer); + if let Some(results) = buffer.map_excerpt_ranges( + old_range, + |buf, _excerpt_range, input_buffer_range| { + let Some(node) = buf.syntax_next_sibling(input_buffer_range) else { + return Vec::new(); + }; + vec![( + BufferOffset(node.byte_range().start) + ..BufferOffset(node.byte_range().end), + (), + )] + }, + ) && let [(new_range, _)] = results.as_slice() { - let new_range = excerpt.map_range_from_buffer( - BufferOffset(node.byte_range().start)..BufferOffset(node.byte_range().end), - ); selected_sibling = true; + let new_range = + buffer.anchor_after(new_range.start)..buffer.anchor_before(new_range.end); Selection { id: selection.id, start: new_range.start, @@ -17088,36 +17135,35 @@ impl Editor { window: &mut Window, cx: &mut Context, ) { - let old_selections: Box<[_]> = self - .selections - .all::(&self.display_snapshot(cx)) - .into(); - if old_selections.is_empty() { - return; - } + let old_selections: Arc<[_]> = self.selections.all_anchors(&self.display_snapshot(cx)); self.hide_mouse_cursor(HideMouseCursorOrigin::MovementAction, cx); - let buffer = self.buffer.read(cx).snapshot(cx); + let multibuffer_snapshot = self.buffer.read(cx).snapshot(cx); let mut selected_sibling = false; let new_selections = old_selections .iter() .map(|selection| { - let old_range = selection.start..selection.end; - let old_range = - 
old_range.start.to_offset(&buffer)..old_range.end.to_offset(&buffer); - let excerpt = buffer.excerpt_containing(old_range.clone()); - - if let Some(mut excerpt) = excerpt - && let Some(node) = excerpt - .buffer() - .syntax_prev_sibling(excerpt.map_range_to_buffer(old_range)) + let old_range = selection.start.to_offset(&multibuffer_snapshot) + ..selection.end.to_offset(&multibuffer_snapshot); + if let Some(results) = multibuffer_snapshot.map_excerpt_ranges( + old_range, + |buf, _excerpt_range, input_buffer_range| { + let Some(node) = buf.syntax_prev_sibling(input_buffer_range) else { + return Vec::new(); + }; + vec![( + BufferOffset(node.byte_range().start) + ..BufferOffset(node.byte_range().end), + (), + )] + }, + ) && let [(new_range, _)] = results.as_slice() { - let new_range = excerpt.map_range_from_buffer( - BufferOffset(node.byte_range().start)..BufferOffset(node.byte_range().end), - ); selected_sibling = true; + let new_range = multibuffer_snapshot.anchor_after(new_range.start) + ..multibuffer_snapshot.anchor_before(new_range.end); Selection { id: selection.id, start: new_range.start, @@ -17474,16 +17520,21 @@ impl Editor { }; let snapshot = self.buffer.read(cx).snapshot(cx); - let excerpt_ids = selections + let excerpt_anchors = selections .iter() - .flat_map(|selection| snapshot.excerpt_ids_for_range(selection.range())) - .unique() - .sorted() + .flat_map(|selection| { + snapshot + .range_to_buffer_ranges(selection.range()) + .into_iter() + .filter_map(|(buffer_snapshot, range, _)| { + snapshot.anchor_in_excerpt(buffer_snapshot.anchor_after(range.start)) + }) + }) .collect::>(); if self.delegate_expand_excerpts { cx.emit(EditorEvent::ExpandExcerptsRequested { - excerpt_ids, + excerpt_anchors, lines, direction, }); @@ -17491,13 +17542,13 @@ impl Editor { } self.buffer.update(cx, |buffer, cx| { - buffer.expand_excerpts(excerpt_ids, lines, direction, cx) + buffer.expand_excerpts(excerpt_anchors, lines, direction, cx) }) } - pub fn expand_excerpt( + pub(crate) 
fn expand_excerpt( &mut self, - excerpt: ExcerptId, + excerpt_anchor: Anchor, direction: ExpandExcerptDirection, window: &mut Window, cx: &mut Context, @@ -17506,7 +17557,7 @@ impl Editor { if self.delegate_expand_excerpts { cx.emit(EditorEvent::ExpandExcerptsRequested { - excerpt_ids: vec![excerpt], + excerpt_anchors: vec![excerpt_anchor], lines: lines_to_expand, direction, }); @@ -17519,12 +17570,11 @@ impl Editor { if direction == ExpandExcerptDirection::Down { let multi_buffer = self.buffer.read(cx); let snapshot = multi_buffer.snapshot(cx); - if let Some(buffer_id) = snapshot.buffer_id_for_excerpt(excerpt) - && let Some(buffer) = multi_buffer.buffer(buffer_id) - && let Some(excerpt_range) = snapshot.context_range_for_excerpt(excerpt) + if let Some((buffer_snapshot, excerpt_range)) = + snapshot.excerpt_containing(excerpt_anchor..excerpt_anchor) { - let buffer_snapshot = buffer.read(cx).snapshot(); - let excerpt_end_row = Point::from_anchor(&excerpt_range.end, &buffer_snapshot).row; + let excerpt_end_row = + Point::from_anchor(&excerpt_range.context.end, &buffer_snapshot).row; let last_row = buffer_snapshot.max_point().row; let lines_below = last_row.saturating_sub(excerpt_end_row); if lines_below >= lines_to_expand { @@ -17540,14 +17590,14 @@ impl Editor { .buffer .read(cx) .snapshot(cx) - .excerpt_before(excerpt) + .excerpt_before(excerpt_anchor) .is_none() { scroll = Some(current_scroll_position); } self.buffer.update(cx, |buffer, cx| { - buffer.expand_excerpts([excerpt], lines_to_expand, direction, cx) + buffer.expand_excerpts([excerpt_anchor], lines_to_expand, direction, cx) }); if let Some(new_scroll_position) = scroll { @@ -17571,20 +17621,15 @@ impl Editor { cx: &mut Context, ) { let multibuffer = self.buffer().read(cx); - let Some(buffer) = multibuffer.as_singleton() else { - return; - }; - let Some(start) = multibuffer.buffer_point_to_anchor(&buffer, range.start, cx) else { - return; - }; - let Some(end) = multibuffer.buffer_point_to_anchor(&buffer, 
range.end, cx) else { + if !multibuffer.is_singleton() { return; }; + let anchor_range = range.to_anchors(&multibuffer.snapshot(cx)); self.change_selections( SelectionEffects::default().nav_history(true), window, cx, - |s| s.select_anchor_ranges([start..end]), + |s| s.select_anchor_ranges([anchor_range]), ); } @@ -17685,9 +17730,10 @@ impl Editor { }; let next_diagnostic_start = buffer.anchor_after(next_diagnostic.range.start); - let Some(buffer_id) = buffer.buffer_id_for_anchor(next_diagnostic_start) else { + let Some((buffer_anchor, _)) = buffer.anchor_to_buffer_anchor(next_diagnostic_start) else { return; }; + let buffer_id = buffer_anchor.buffer_id; let snapshot = self.snapshot(window, cx); if snapshot.intersects_fold(next_diagnostic.range.start) { self.unfold_ranges( @@ -18560,9 +18606,9 @@ impl Editor { let editor_snapshot = self.snapshot(window, cx); // We don't care about multi-buffer symbols - let Some((excerpt_id, _, _)) = editor_snapshot.as_singleton() else { + if !editor_snapshot.is_singleton() { return Task::ready(Ok(())); - }; + } let cursor_offset = self .selections @@ -18582,7 +18628,11 @@ impl Editor { let multi_snapshot = editor_snapshot.buffer(); let buffer_range = |range: &Range<_>| { - Anchor::range_in_buffer(excerpt_id, range.clone()).to_offset(multi_snapshot) + Some( + multi_snapshot + .buffer_anchor_range_to_anchor_range(range.clone())? 
+ .to_offset(multi_snapshot), + ) }; wcx.update_window(wcx.window_handle(), |_, window, acx| { @@ -18591,7 +18641,7 @@ impl Editor { .enumerate() .filter_map(|(idx, item)| { // Find the closest outline item by distance between outline text and cursor location - let source_range = buffer_range(&item.source_range_for_text); + let source_range = buffer_range(&item.source_range_for_text)?; let distance_to_closest_endpoint = cmp::min( (source_range.start.0 as isize - cursor_offset.0 as isize).abs(), (source_range.end.0 as isize - cursor_offset.0 as isize).abs(), @@ -18616,7 +18666,9 @@ impl Editor { return; }; - let range = buffer_range(&outline_items[idx].source_range_for_text); + let Some(range) = buffer_range(&outline_items[idx].source_range_for_text) else { + return; + }; let selection = [range.start..range.start]; let _ = editor @@ -18686,24 +18738,15 @@ impl Editor { let (locations, current_location_index) = multi_buffer.update(cx, |multi_buffer, cx| { + let multi_buffer_snapshot = multi_buffer.snapshot(cx); let mut locations = locations .into_iter() .filter_map(|loc| { - let start = multi_buffer.buffer_anchor_to_anchor( - &loc.buffer, - loc.range.start, - cx, - )?; - let end = multi_buffer.buffer_anchor_to_anchor( - &loc.buffer, - loc.range.end, - cx, - )?; + let start = multi_buffer_snapshot.anchor_in_excerpt(loc.range.start)?; + let end = multi_buffer_snapshot.anchor_in_excerpt(loc.range.end)?; Some(start..end) }) .collect::>(); - - let multi_buffer_snapshot = multi_buffer.snapshot(cx); // There is an O(n) implementation, but given this list will be // small (usually <100 items), the extra O(log(n)) factor isn't // worth the (surprisingly large amount of) extra complexity. 
@@ -18959,14 +19002,21 @@ impl Editor { for (buffer, mut ranges_for_buffer) in locations { ranges_for_buffer.sort_by_key(|range| (range.start, Reverse(range.end))); key.push((buffer.read(cx).remote_id(), ranges_for_buffer.clone())); - let (new_ranges, _) = multibuffer.set_excerpts_for_path( + multibuffer.set_excerpts_for_path( PathKey::for_buffer(&buffer, cx), buffer.clone(), - ranges_for_buffer, + ranges_for_buffer.clone(), multibuffer_context_lines(cx), cx, ); - ranges.extend(new_ranges) + let snapshot = multibuffer.snapshot(cx); + let buffer_snapshot = buffer.read(cx).snapshot(); + ranges.extend(ranges_for_buffer.into_iter().filter_map(|range| { + let text_range = buffer_snapshot.anchor_range_inside(range); + let start = snapshot.anchor_in_buffer(text_range.start)?; + let end = snapshot.anchor_in_buffer(text_range.end)?; + Some(start..end) + })) } multibuffer.with_title(title) @@ -19074,28 +19124,11 @@ impl Editor { let snapshot = cursor_buffer.read(cx).snapshot(); let cursor_buffer_offset = cursor_buffer_position.to_offset(&snapshot); let cursor_buffer_offset_end = cursor_buffer_position_end.to_offset(&snapshot); - let prepare_rename = provider - .range_for_rename(&cursor_buffer, cursor_buffer_position, cx) - .unwrap_or_else(|| Task::ready(Ok(None))); + let prepare_rename = provider.range_for_rename(&cursor_buffer, cursor_buffer_position, cx); drop(snapshot); Some(cx.spawn_in(window, async move |this, cx| { - let rename_range = if let Some(range) = prepare_rename.await? { - Some(range) - } else { - this.update(cx, |this, cx| { - let buffer = this.buffer.read(cx).snapshot(cx); - let mut buffer_highlights = this - .document_highlights_for_position(selection.head(), &buffer) - .filter(|highlight| { - highlight.start.excerpt_id == selection.head().excerpt_id - && highlight.end.excerpt_id == selection.head().excerpt_id - }); - buffer_highlights - .next() - .map(|highlight| highlight.start.text_anchor..highlight.end.text_anchor) - })? 
- }; + let rename_range = prepare_rename.await?; if let Some(rename_range) = rename_range { this.update_in(cx, |this, window, cx| { let snapshot = cursor_buffer.read(cx).snapshot(); @@ -19417,12 +19450,12 @@ impl Editor { let mut buffer_id_to_ranges: BTreeMap>> = BTreeMap::new(); for selection_range in selection_ranges { - for (buffer, buffer_range, _) in - snapshot.range_to_buffer_ranges(selection_range.start..=selection_range.end) + for (buffer_snapshot, buffer_range, _) in + snapshot.range_to_buffer_ranges(selection_range.start..selection_range.end) { - let buffer_id = buffer.remote_id(); - let start = buffer.anchor_before(buffer_range.start); - let end = buffer.anchor_after(buffer_range.end); + let buffer_id = buffer_snapshot.remote_id(); + let start = buffer_snapshot.anchor_before(buffer_range.start); + let end = buffer_snapshot.anchor_after(buffer_range.end); buffers.insert(multi_buffer.buffer(buffer_id).unwrap()); buffer_id_to_ranges .entry(buffer_id) @@ -20200,10 +20233,10 @@ impl Editor { .is_some(); has_folds } else { - let buffer_ids = self.buffer.read(cx).excerpt_buffer_ids(); - let has_folds = buffer_ids - .iter() - .any(|buffer_id| self.is_buffer_folded(*buffer_id, cx)); + let snapshot = self.buffer.read(cx).snapshot(cx); + let has_folds = snapshot + .all_buffer_ids() + .any(|buffer_id| self.is_buffer_folded(buffer_id, cx)); has_folds }; @@ -20368,7 +20401,8 @@ impl Editor { self.toggle_fold_multiple_buffers = cx.spawn_in(window, async move |editor, cx| { editor .update_in(cx, |editor, _, cx| { - for buffer_id in editor.buffer.read(cx).excerpt_buffer_ids() { + let snapshot = editor.buffer.read(cx).snapshot(cx); + for buffer_id in snapshot.all_buffer_ids() { editor.fold_buffer(buffer_id, cx); } }) @@ -20556,7 +20590,8 @@ impl Editor { self.toggle_fold_multiple_buffers = cx.spawn(async move |editor, cx| { editor .update(cx, |editor, cx| { - for buffer_id in editor.buffer.read(cx).excerpt_buffer_ids() { + let snapshot = 
editor.buffer.read(cx).snapshot(cx); + for buffer_id in snapshot.all_buffer_ids() { editor.unfold_buffer(buffer_id, cx); } }) @@ -20655,25 +20690,19 @@ impl Editor { return; } - let mut all_folded_excerpt_ids = Vec::new(); - for buffer_id in &ids_to_fold { - let folded_excerpts = self.buffer().read(cx).excerpts_for_buffer(*buffer_id, cx); - all_folded_excerpt_ids.extend(folded_excerpts.into_iter().map(|(id, _, _)| id)); - } - self.display_map.update(cx, |display_map, cx| { display_map.fold_buffers(ids_to_fold.clone(), cx) }); let snapshot = self.display_snapshot(cx); self.selections.change_with(&snapshot, |selections| { - for buffer_id in ids_to_fold { + for buffer_id in ids_to_fold.iter().copied() { selections.remove_selections_from_buffer(buffer_id); } }); cx.emit(EditorEvent::BufferFoldToggled { - ids: all_folded_excerpt_ids, + ids: ids_to_fold, folded: true, }); cx.notify(); @@ -20683,12 +20712,11 @@ impl Editor { if self.buffer().read(cx).is_singleton() || !self.is_buffer_folded(buffer_id, cx) { return; } - let unfolded_excerpts = self.buffer().read(cx).excerpts_for_buffer(buffer_id, cx); self.display_map.update(cx, |display_map, cx| { display_map.unfold_buffers([buffer_id], cx); }); cx.emit(EditorEvent::BufferFoldToggled { - ids: unfolded_excerpts.iter().map(|&(id, _, _)| id).collect(), + ids: vec![buffer_id], folded: false, }); cx.notify(); @@ -20741,14 +20769,6 @@ impl Editor { return; } - let mut buffers_affected = HashSet::default(); - let multi_buffer = self.buffer().read(cx); - for range in ranges { - if let Some((_, buffer, _)) = multi_buffer.excerpt_containing(range.start.clone(), cx) { - buffers_affected.insert(buffer.read(cx).remote_id()); - }; - } - self.display_map.update(cx, update); if auto_scroll { @@ -20786,7 +20806,7 @@ impl Editor { cx: &mut Context, ) { self.buffer.update(cx, |buffer, cx| { - buffer.expand_diff_hunks(vec![Anchor::min()..Anchor::max()], cx) + buffer.expand_diff_hunks(vec![Anchor::Min..Anchor::Max], cx) }); } @@ -20797,7 
+20817,7 @@ impl Editor { cx: &mut Context, ) { self.buffer.update(cx, |buffer, cx| { - buffer.collapse_diff_hunks(vec![Anchor::min()..Anchor::max()], cx) + buffer.collapse_diff_hunks(vec![Anchor::Min..Anchor::Max], cx) }); } @@ -20822,7 +20842,7 @@ impl Editor { buffer: &'a MultiBufferSnapshot, ) -> impl 'a + Iterator { ranges.iter().flat_map(move |range| { - let end_excerpt_id = range.end.excerpt_id; + let end_excerpt = buffer.excerpt_containing(range.end..range.end); let range = range.to_point(buffer); let mut peek_end = range.end; if range.end.row < buffer.max_row().0 { @@ -20830,7 +20850,19 @@ impl Editor { } buffer .diff_hunks_in_range(range.start..peek_end) - .filter(move |hunk| hunk.excerpt_id.cmp(&end_excerpt_id, buffer).is_le()) + .filter(move |hunk| { + if let Some((_, excerpt_range)) = &end_excerpt + && let Some(end_anchor) = + buffer.anchor_in_excerpt(excerpt_range.context.end) + && let Some(hunk_end_anchor) = + buffer.anchor_in_excerpt(hunk.excerpt_range.context.end) + && hunk_end_anchor.cmp(&end_anchor, buffer).is_gt() + { + false + } else { + true + } + }) }) } @@ -21032,7 +21064,7 @@ impl Editor { pub fn clear_expanded_diff_hunks(&mut self, cx: &mut Context) -> bool { self.buffer.update(cx, |buffer, cx| { - let ranges = vec![Anchor::min()..Anchor::max()]; + let ranges = vec![Anchor::Min..Anchor::Max]; if !buffer.all_diff_hunks_expanded() && buffer.has_expanded_diff_hunks_in_ranges(&ranges, cx) { @@ -21048,7 +21080,7 @@ impl Editor { if self.buffer.read(cx).all_diff_hunks_expanded() { return true; } - let ranges = vec![Anchor::min()..Anchor::max()]; + let ranges = vec![Anchor::Min..Anchor::Max]; self.buffer .read(cx) .has_expanded_diff_hunks_in_ranges(&ranges, cx) @@ -22103,11 +22135,11 @@ impl Editor { let end_point = overlay.anchor_range.end.to_point(&snapshot); let start_row = snapshot .point_to_buffer_point(start_point) - .map(|(_, p, _)| p.row) + .map(|(_, p)| p.row) .unwrap_or(start_point.row); let end_row = snapshot 
.point_to_buffer_point(end_point) - .map(|(_, p, _)| p.row) + .map(|(_, p)| p.row) .unwrap_or(end_point.row); Some((start_row, end_row)) } @@ -22607,9 +22639,9 @@ impl Editor { snapshot.range_to_buffer_ranges(start_point..end_point); let ranges: Vec<(u32, u32)> = buffer_ranges .iter() - .map(|(buffer, range, _)| { - let start = buffer.offset_to_point(range.start.0).row; - let end = buffer.offset_to_point(range.end.0).row; + .map(|(buffer_snapshot, range, _)| { + let start = buffer_snapshot.offset_to_point(range.start.0).row; + let end = buffer_snapshot.offset_to_point(range.end.0).row; (start, end) }) .collect(); @@ -22935,15 +22967,14 @@ impl Editor { } fn target_file<'a>(&self, cx: &'a App) -> Option<&'a dyn language::LocalFile> { - self.active_excerpt(cx)? - .1 + self.active_buffer(cx)? .read(cx) .file() .and_then(|f| f.as_local()) } pub fn target_file_abs_path(&self, cx: &mut Context) -> Option { - self.active_excerpt(cx).and_then(|(_, buffer, _)| { + self.active_buffer(cx).and_then(|buffer| { let buffer = buffer.read(cx); if let Some(project_path) = buffer.project_path(cx) { let project = self.project()?.read(cx); @@ -22992,7 +23023,7 @@ impl Editor { _window: &mut Window, cx: &mut Context, ) { - if let Some(path) = self.active_excerpt(cx).and_then(|(_, buffer, _)| { + if let Some(path) = self.active_buffer(cx).and_then(|buffer| { let project = self.project()?.read(cx); let path = buffer.read(cx).file()?.path(); let path = path.display(project.path_style(cx)); @@ -23050,41 +23081,22 @@ impl Editor { } let position = active_stack_frame.position; - let buffer_id = position.buffer_id?; - let snapshot = self - .project - .as_ref()? - .read(cx) - .buffer_for_id(buffer_id, cx)? - .read(cx) - .snapshot(); - let mut handled = false; - for (id, _, ExcerptRange { context, .. 
}) in - self.buffer.read(cx).excerpts_for_buffer(buffer_id, cx) - { - if context.start.cmp(&position, &snapshot).is_ge() - || context.end.cmp(&position, &snapshot).is_lt() - { - continue; - } - let snapshot = self.buffer.read(cx).snapshot(cx); - let multibuffer_anchor = snapshot.anchor_in_excerpt(id, position)?; + let snapshot = self.buffer.read(cx).snapshot(cx); + let multibuffer_anchor = snapshot.anchor_in_excerpt(position)?; - handled = true; - self.clear_row_highlights::(); + self.clear_row_highlights::(); - self.go_to_line::( - multibuffer_anchor, - Some(cx.theme().colors().editor_debugger_active_line_background), - window, - cx, - ); + self.go_to_line::( + multibuffer_anchor, + Some(cx.theme().colors().editor_debugger_active_line_background), + window, + cx, + ); - cx.notify(); - } + cx.notify(); - handled.then_some(()) + Some(()) }) .is_some() } @@ -23095,7 +23107,7 @@ impl Editor { _: &mut Window, cx: &mut Context, ) { - if let Some(file_stem) = self.active_excerpt(cx).and_then(|(_, buffer, _)| { + if let Some(file_stem) = self.active_buffer(cx).and_then(|buffer| { let file = buffer.read(cx).file()?; file.path().file_stem() }) { @@ -23104,7 +23116,7 @@ impl Editor { } pub fn copy_file_name(&mut self, _: &CopyFileName, _: &mut Window, cx: &mut Context) { - if let Some(file_name) = self.active_excerpt(cx).and_then(|(_, buffer, _)| { + if let Some(file_name) = self.active_buffer(cx).and_then(|buffer| { let file = buffer.read(cx).file()?; Some(file.file_name(cx)) }) { @@ -23157,7 +23169,7 @@ impl Editor { .selections .newest::(&snapshot.display_snapshot) .head(); - let (buffer, point, _) = snapshot.buffer_snapshot().point_to_buffer_point(cursor)?; + let (buffer, point) = snapshot.buffer_snapshot().point_to_buffer_point(cursor)?; let (_, blame_entry) = blame .update(cx, |blame, cx| { blame @@ -23304,33 +23316,28 @@ impl Editor { let multi_buffer = self.buffer().read(cx); let multi_buffer_snapshot = multi_buffer.snapshot(cx); let buffer_ranges = 
multi_buffer_snapshot - .range_to_buffer_ranges(selection_range.start..=selection_range.end); + .range_to_buffer_ranges(selection_range.start..selection_range.end); - let (buffer, range, _) = if selection.reversed { + let (buffer_snapshot, range, _) = if selection.reversed { buffer_ranges.first() } else { buffer_ranges.last() }?; - let buffer_range = range.to_point(buffer); + let buffer_range = range.to_point(buffer_snapshot); + let buffer = multi_buffer.buffer(buffer_snapshot.remote_id()).unwrap(); - let Some(buffer_diff) = multi_buffer.diff_for(buffer.remote_id()) else { - return Some(( - multi_buffer.buffer(buffer.remote_id()).unwrap(), - buffer_range.start.row..buffer_range.end.row, - )); + let Some(buffer_diff) = multi_buffer.diff_for(buffer_snapshot.remote_id()) else { + return Some((buffer, buffer_range.start.row..buffer_range.end.row)); }; let buffer_diff_snapshot = buffer_diff.read(cx).snapshot(cx); - let start = - buffer_diff_snapshot.buffer_point_to_base_text_point(buffer_range.start, buffer); - let end = - buffer_diff_snapshot.buffer_point_to_base_text_point(buffer_range.end, buffer); + let start = buffer_diff_snapshot + .buffer_point_to_base_text_point(buffer_range.start, &buffer_snapshot); + let end = buffer_diff_snapshot + .buffer_point_to_base_text_point(buffer_range.end, &buffer_snapshot); - Some(( - multi_buffer.buffer(buffer.remote_id()).unwrap(), - start.row..end.row, - )) + Some((buffer, start.row..end.row)) }); let Some((buffer, selection)) = buffer_and_selection else { @@ -23404,7 +23411,7 @@ impl Editor { end_line }; - if let Some(file_location) = self.active_excerpt(cx).and_then(|(_, buffer, _)| { + if let Some(file_location) = self.active_buffer(cx).and_then(|buffer| { let project = self.project()?.read(cx); let file = buffer.read(cx).file()?; let path = file.path().display(project.path_style(cx)); @@ -23505,6 +23512,7 @@ impl Editor { let Some(buffer) = multibuffer.as_singleton() else { return; }; + let buffer_snapshot = 
buffer.read(cx).snapshot(); let Some(workspace) = self.workspace() else { return; @@ -23519,7 +23527,8 @@ impl Editor { .map(|selection| { ( buffer.clone(), - (selection.start.text_anchor..selection.end.text_anchor) + (selection.start.text_anchor_in(&buffer_snapshot) + ..selection.end.text_anchor_in(&buffer_snapshot)) .to_point(buffer.read(cx)), ) }) @@ -23688,8 +23697,7 @@ impl Editor { let start = highlight.range.start.to_display_point(&snapshot); let end = highlight.range.end.to_display_point(&snapshot); let start_row = start.row().0; - let end_row = if !highlight.range.end.text_anchor.is_max() && end.column() == 0 - { + let end_row = if !highlight.range.end.is_max() && end.column() == 0 { end.row().0.saturating_sub(1) } else { end.row().0 @@ -23925,42 +23933,6 @@ impl Editor { } } - fn document_highlights_for_position<'a>( - &'a self, - position: Anchor, - buffer: &'a MultiBufferSnapshot, - ) -> impl 'a + Iterator> { - let read_highlights = self - .background_highlights - .get(&HighlightKey::DocumentHighlightRead) - .map(|h| &h.1); - let write_highlights = self - .background_highlights - .get(&HighlightKey::DocumentHighlightWrite) - .map(|h| &h.1); - let left_position = position.bias_left(buffer); - let right_position = position.bias_right(buffer); - read_highlights - .into_iter() - .chain(write_highlights) - .flat_map(move |ranges| { - let start_ix = match ranges.binary_search_by(|probe| { - let cmp = probe.end.cmp(&left_position, buffer); - if cmp.is_ge() { - Ordering::Greater - } else { - Ordering::Less - } - }) { - Ok(i) | Err(i) => i, - }; - - ranges[start_ix..] 
- .iter() - .take_while(move |range| range.start.cmp(&right_position, buffer).is_le()) - }) - } - pub fn has_background_highlights(&self, key: HighlightKey) -> bool { self.background_highlights .get(&key) @@ -24182,26 +24154,16 @@ impl Editor { return Some(Task::ready(Ok(Vec::new()))); }; - let buffer = editor.buffer.read_with(cx, |buffer, cx| { - let snapshot = buffer.snapshot(cx); - - let excerpt = snapshot.excerpt_containing( - current_execution_position..current_execution_position, - )?; - - editor.buffer.read(cx).buffer(excerpt.buffer_id()) - })?; - - if current_execution_position - .text_anchor - .buffer_id - .is_some_and(|id| id != buffer.read(cx).remote_id()) - { - return Some(Task::ready(Ok(Vec::new()))); - } + let (buffer, buffer_anchor) = + editor.buffer.read_with(cx, |multibuffer, cx| { + let multibuffer_snapshot = multibuffer.snapshot(cx); + let (buffer_anchor, _) = multibuffer_snapshot + .anchor_to_buffer_anchor(current_execution_position)?; + let buffer = multibuffer.buffer(buffer_anchor.buffer_id)?; + Some((buffer, buffer_anchor)) + })?; - let range = - buffer.read(cx).anchor_before(0)..current_execution_position.text_anchor; + let range = buffer.read(cx).anchor_before(0)..buffer_anchor; semantics.inline_values(buffer, range, cx) }) @@ -24215,7 +24177,7 @@ impl Editor { for (buffer_id, inline_value) in inline_values .into_iter() - .filter_map(|hint| Some((hint.position.buffer_id?, hint))) + .map(|hint| (hint.position.buffer_id, hint)) { buffer_inline_values .entry(buffer_id) @@ -24228,22 +24190,20 @@ impl Editor { let snapshot = editor.buffer.read(cx).snapshot(cx); let mut new_inlays = Vec::default(); - for (excerpt_id, buffer_snapshot, _) in snapshot.excerpts() { - let buffer_id = buffer_snapshot.remote_id(); - buffer_inline_values - .get(&buffer_id) - .into_iter() - .flatten() - .for_each(|hint| { - let inlay = Inlay::debugger( - post_inc(&mut editor.next_inlay_id), - Anchor::in_buffer(excerpt_id, hint.position), - hint.text(), - ); - if 
!inlay.text().chars().contains(&'\n') { - new_inlays.push(inlay); - } - }); + for (_buffer_id, inline_values) in buffer_inline_values { + for hint in inline_values { + let Some(anchor) = snapshot.anchor_in_excerpt(hint.position) else { + continue; + }; + let inlay = Inlay::debugger( + post_inc(&mut editor.next_inlay_id), + anchor, + hint.text(), + ); + if !inlay.text().chars().contains(&'\n') { + new_inlays.push(inlay); + } + } } let mut inlay_ids = new_inlays.iter().map(|inlay| inlay.id).collect(); @@ -24312,11 +24272,12 @@ impl Editor { }; telemetry.log_edit_event("editor", is_via_ssh); } - multi_buffer::Event::ExcerptsAdded { + multi_buffer::Event::BufferRangesUpdated { buffer, - predecessor, - excerpts, + ranges, + path_key, } => { + self.refresh_document_highlights(cx); let buffer_id = buffer.read(cx).remote_id(); if self.buffer.read(cx).diff_for(buffer_id).is_none() && let Some(project) = &self.project @@ -24330,27 +24291,29 @@ impl Editor { ) .detach(); } - self.semantic_token_state - .invalidate_buffer(&buffer.read(cx).remote_id()); + self.register_visible_buffers(cx); self.update_lsp_data(Some(buffer_id), window, cx); self.refresh_inlay_hints(InlayHintRefreshReason::NewLinesShown, cx); self.refresh_runnables(None, window, cx); + self.bracket_fetched_tree_sitter_chunks + .retain(|range, _| range.start.buffer_id != buffer_id); self.colorize_brackets(false, cx); self.refresh_selected_text_highlights(&self.display_snapshot(cx), true, window, cx); - cx.emit(EditorEvent::ExcerptsAdded { + self.semantic_token_state.invalidate_buffer(&buffer_id); + cx.emit(EditorEvent::BufferRangesUpdated { buffer: buffer.clone(), - predecessor: *predecessor, - excerpts: excerpts.clone(), + ranges: ranges.clone(), + path_key: path_key.clone(), }); } - multi_buffer::Event::ExcerptsRemoved { - ids, - removed_buffer_ids, - } => { + multi_buffer::Event::BuffersRemoved { removed_buffer_ids } => { if let Some(inlay_hints) = &mut self.inlay_hints { 
inlay_hints.remove_inlay_chunk_data(removed_buffer_ids); } - self.refresh_inlay_hints(InlayHintRefreshReason::ExcerptsRemoved(ids.clone()), cx); + self.refresh_inlay_hints( + InlayHintRefreshReason::BuffersRemoved(removed_buffer_ids.clone()), + cx, + ); for buffer_id in removed_buffer_ids { self.registered_buffers.remove(buffer_id); self.clear_runnables(Some(*buffer_id)); @@ -24366,38 +24329,18 @@ impl Editor { }); jsx_tag_auto_close::refresh_enabled_in_any_buffer(self, multibuffer, cx); - cx.emit(EditorEvent::ExcerptsRemoved { - ids: ids.clone(), + cx.emit(EditorEvent::BuffersRemoved { removed_buffer_ids: removed_buffer_ids.clone(), }); } - multi_buffer::Event::ExcerptsEdited { - excerpt_ids, - buffer_ids, - } => { + multi_buffer::Event::BuffersEdited { buffer_ids } => { self.display_map.update(cx, |map, cx| { map.unfold_buffers(buffer_ids.iter().copied(), cx) }); - cx.emit(EditorEvent::ExcerptsEdited { - ids: excerpt_ids.clone(), + cx.emit(EditorEvent::BuffersEdited { + buffer_ids: buffer_ids.clone(), }); } - multi_buffer::Event::ExcerptsExpanded { ids } => { - self.refresh_inlay_hints(InlayHintRefreshReason::NewLinesShown, cx); - self.refresh_document_highlights(cx); - let snapshot = multibuffer.read(cx).snapshot(cx); - for id in ids { - self.bracket_fetched_tree_sitter_chunks.remove(id); - if let Some(buffer) = snapshot.buffer_for_excerpt(*id) { - self.semantic_token_state - .invalidate_buffer(&buffer.remote_id()); - } - } - self.colorize_brackets(false, cx); - self.update_lsp_data(None, window, cx); - self.refresh_runnables(None, window, cx); - cx.emit(EditorEvent::ExcerptsExpanded { ids: ids.clone() }) - } multi_buffer::Event::Reparsed(buffer_id) => { self.refresh_runnables(Some(*buffer_id), window, cx); self.refresh_selected_text_highlights(&self.display_snapshot(cx), true, window, cx); @@ -24700,18 +24643,13 @@ impl Editor { let mut new_selections_by_buffer = HashMap::default(); match &jump_data { Some(JumpData::MultiBufferPoint { - excerpt_id, - position, 
anchor, + position, line_offset_from_top, }) => { - let multi_buffer_snapshot = self.buffer.read(cx).snapshot(cx); - if let Some(buffer) = multi_buffer_snapshot - .buffer_id_for_excerpt(*excerpt_id) - .and_then(|buffer_id| self.buffer.read(cx).buffer(buffer_id)) - { + if let Some(buffer) = self.buffer.read(cx).buffer(anchor.buffer_id) { let buffer_snapshot = buffer.read(cx).snapshot(); - let jump_to_point = if buffer_snapshot.can_resolve(anchor) { + let jump_to_point = if buffer_snapshot.can_resolve(&anchor) { language::ToPoint::to_point(anchor, &buffer_snapshot) } else { buffer_snapshot.clip_point(*position, Bias::Left) @@ -24731,7 +24669,7 @@ impl Editor { line_offset_from_top, }) => { let point = MultiBufferPoint::new(row.0, 0); - if let Some((buffer, buffer_point, _)) = + if let Some((buffer, buffer_point)) = self.buffer.read(cx).point_to_buffer_point(point, cx) { let buffer_offset = buffer.read(cx).point_to_offset(buffer_point); @@ -24747,18 +24685,20 @@ impl Editor { .selections .all::(&self.display_snapshot(cx)); let multi_buffer = self.buffer.read(cx); + let multi_buffer_snapshot = multi_buffer.snapshot(cx); for selection in selections { - for (snapshot, range, _, anchor) in multi_buffer - .snapshot(cx) + for (snapshot, range, anchor) in multi_buffer_snapshot .range_to_buffer_ranges_with_deleted_hunks(selection.range()) { - if let Some(anchor) = anchor { - let Some(buffer_handle) = multi_buffer.buffer_for_anchor(anchor, cx) + if let Some((text_anchor, _)) = anchor.and_then(|anchor| { + multi_buffer_snapshot.anchor_to_buffer_anchor(anchor) + }) { + let Some(buffer_handle) = multi_buffer.buffer(text_anchor.buffer_id) else { continue; }; let offset = text::ToOffset::to_offset( - &anchor.text_anchor, + &text_anchor, &buffer_handle.read(cx).snapshot(), ); let range = BufferOffset(offset)..BufferOffset(offset); @@ -24907,9 +24847,7 @@ impl Editor { }; let nav_history = editor.nav_history.take(); let multibuffer_snapshot = editor.buffer().read(cx).snapshot(cx); - 
let Some((excerpt_id, _, buffer_snapshot)) = - multibuffer_snapshot.as_singleton() - else { + let Some(buffer_snapshot) = multibuffer_snapshot.as_singleton() else { return; }; editor.change_selections( @@ -24921,7 +24859,7 @@ impl Editor { let range = buffer_snapshot.anchor_before(range.start) ..buffer_snapshot.anchor_after(range.end); multibuffer_snapshot - .anchor_range_in_excerpt(excerpt_id, range) + .buffer_anchor_range_to_anchor_range(range) .unwrap() })); }, @@ -25415,8 +25353,11 @@ impl Editor { } } }); - self.change_selections(SelectionEffects::no_scroll(), window, cx, |selections| { - selections.refresh() + let selections = self + .selections + .all::(&self.display_snapshot(cx)); + self.change_selections(SelectionEffects::no_scroll(), window, cx, |s| { + s.select(selections); }); } @@ -25798,7 +25739,12 @@ impl Editor { if !self.lsp_data_enabled() { return; } - for (_, (visible_buffer, _, _)) in self.visible_excerpts(true, cx) { + let visible_buffers: Vec<_> = self + .visible_buffers(cx) + .into_iter() + .filter(|buffer| self.is_lsp_relevant(buffer.read(cx).file(), cx)) + .collect(); + for visible_buffer in visible_buffers { self.register_buffer(visible_buffer.read(cx).remote_id(), cx); } } @@ -26090,17 +26036,16 @@ fn process_completion_for_edit( range_to_replace.end = *cursor_position; } - let replace_range = range_to_replace.to_offset(buffer); CompletionEdit { new_text, - replace_range: BufferOffset(replace_range.start)..BufferOffset(replace_range.end), + replace_range: range_to_replace, snippet, } } struct CompletionEdit { new_text: String, - replace_range: Range, + replace_range: Range, snippet: Option, } @@ -26542,10 +26487,10 @@ impl NewlineConfig { range: Range, ) -> bool { let (buffer, range) = match buffer - .range_to_buffer_ranges(range.start..=range.end) + .range_to_buffer_ranges(range.start..range.end) .as_slice() { - [(buffer, range, _)] => (*buffer, range.clone()), + [(buffer_snapshot, range, _)] => (buffer_snapshot.clone(), range.clone()), 
_ => return false, }; let pair = { @@ -27084,7 +27029,7 @@ pub trait SemanticsProvider { buffer: &Entity, position: text::Anchor, cx: &mut App, - ) -> Option>>>>; + ) -> Task>>>; fn perform_rename( &self, @@ -27098,7 +27043,6 @@ pub trait SemanticsProvider { pub trait CompletionProvider { fn completions( &self, - excerpt_id: ExcerptId, buffer: &Entity, buffer_position: text::Anchor, trigger: CompletionContext, @@ -27167,7 +27111,6 @@ pub trait CodeActionProvider { &self, buffer_handle: Entity, action: CodeAction, - excerpt_id: ExcerptId, push_to_history: bool, window: &mut Window, cx: &mut App, @@ -27210,7 +27153,6 @@ impl CodeActionProvider for Entity { &self, buffer_handle: Entity, action: CodeAction, - _excerpt_id: ExcerptId, push_to_history: bool, _window: &mut Window, cx: &mut App, @@ -27458,7 +27400,6 @@ fn snippet_completions( impl CompletionProvider for Entity { fn completions( &self, - _excerpt_id: ExcerptId, buffer: &Entity, buffer_position: text::Anchor, options: CompletionContext, @@ -27680,8 +27621,12 @@ impl SemanticsProvider for WeakEntity { buffer: &Entity, position: text::Anchor, cx: &mut App, - ) -> Option>>>> { - self.update(cx, |project, cx| { + ) -> Task>>> { + let Some(this) = self.upgrade() else { + return Task::ready(Ok(None)); + }; + + this.update(cx, |project, cx| { let buffer = buffer.clone(); let task = project.prepare_rename(buffer.clone(), position, cx); cx.spawn(async move |_, cx| { @@ -27705,7 +27650,6 @@ impl SemanticsProvider for WeakEntity { }) }) }) - .ok() } fn perform_rename( @@ -27882,6 +27826,7 @@ impl EditorSnapshot { end_row.0 += 1; } let is_created_file = hunk.is_created_file(); + let multi_buffer_range = hunk.multi_buffer_range.clone(); DisplayDiffHunk::Unfolded { status: hunk.status(), @@ -27889,10 +27834,7 @@ impl EditorSnapshot { ..hunk.diff_base_byte_range.end.0, word_diffs: hunk.word_diffs, display_row_range: hunk_display_start.row()..end_row, - multi_buffer_range: Anchor::range_in_buffer( - hunk.excerpt_id, - 
hunk.buffer_range, - ), + multi_buffer_range, is_created_file, } }; @@ -28213,27 +28155,23 @@ pub enum EditorEvent { utf16_range_to_replace: Option>, text: Arc, }, - ExcerptsAdded { + BufferRangesUpdated { buffer: Entity, - predecessor: ExcerptId, - excerpts: Vec<(ExcerptId, ExcerptRange)>, + path_key: PathKey, + ranges: Vec>, }, - ExcerptsRemoved { - ids: Vec, + BuffersRemoved { removed_buffer_ids: Vec, }, + BuffersEdited { + buffer_ids: Vec, + }, BufferFoldToggled { - ids: Vec, + ids: Vec, folded: bool, }, - ExcerptsEdited { - ids: Vec, - }, - ExcerptsExpanded { - ids: Vec, - }, ExpandExcerptsRequested { - excerpt_ids: Vec, + excerpt_anchors: Vec, lines: u32, direction: ExpandExcerptDirection, }, @@ -28834,11 +28772,19 @@ fn edit_prediction_edit_text( edits: &[(Range, impl AsRef)], edit_preview: &EditPreview, include_deletions: bool, + multibuffer_snapshot: &MultiBufferSnapshot, cx: &App, ) -> HighlightedText { let edits = edits .iter() - .map(|(anchor, text)| (anchor.start.text_anchor..anchor.end.text_anchor, text)) + .filter_map(|(anchor, text)| { + Some(( + multibuffer_snapshot + .anchor_range_to_buffer_anchor_range(anchor.clone())? 
+ .1, + text, + )) + }) .collect::>(); edit_preview.highlight_edits(current_snapshot, &edits, include_deletions, cx) diff --git a/crates/editor/src/editor_tests.rs b/crates/editor/src/editor_tests.rs index 7e397507eda0d800ee9ed6b204ed95e71d50234b..c29df272d35af5a69ba07c76cb7da3866786bd2b 100644 --- a/crates/editor/src/editor_tests.rs +++ b/crates/editor/src/editor_tests.rs @@ -52,14 +52,13 @@ use settings::{ ProjectSettingsContent, ScrollBeyondLastLine, SearchSettingsContent, SettingsContent, SettingsStore, }; -use std::borrow::Cow; +use std::{borrow::Cow, sync::Arc}; use std::{cell::RefCell, future::Future, rc::Rc, sync::atomic::AtomicBool, time::Instant}; use std::{ iter, sync::atomic::{self, AtomicUsize}, }; use test::build_editor_with_project; -use text::ToPoint as _; use unindent::Unindent; use util::{ assert_set_eq, path, @@ -1030,12 +1029,13 @@ async fn test_navigation_history(cx: &mut TestAppContext) { original_scroll_position ); + let other_buffer = + cx.new(|cx| MultiBuffer::singleton(cx.new(|cx| Buffer::local("test", cx)), cx)); + // Ensure we don't panic when navigation data contains invalid anchors *and* points. 
- let mut invalid_anchor = editor - .scroll_manager - .native_anchor(&editor.display_snapshot(cx), cx) - .anchor; - invalid_anchor.text_anchor.buffer_id = BufferId::new(999).ok(); + let invalid_anchor = other_buffer.update(cx, |buffer, cx| { + buffer.snapshot(cx).anchor_after(MultiBufferOffset(3)) + }); let invalid_point = Point::new(9999, 0); editor.navigate( Arc::new(NavigationData { @@ -13836,7 +13836,7 @@ async fn test_multibuffer_format_during_save(cx: &mut TestAppContext) { 0, cx, ); - assert_eq!(multi_buffer.excerpt_ids().len(), 9); + assert_eq!(multi_buffer.read(cx).excerpts().count(), 9); multi_buffer }); let multi_buffer_editor = cx.new_window_entity(|window, cx| { @@ -18946,157 +18946,6 @@ fn test_editing_disjoint_excerpts(cx: &mut TestAppContext) { }); } -#[gpui::test] -fn test_refresh_selections(cx: &mut TestAppContext) { - init_test(cx, |_| {}); - - let buffer = cx.new(|cx| Buffer::local(sample_text(5, 4, 'a'), cx)); - let multibuffer = cx.new(|cx| { - let mut multibuffer = MultiBuffer::new(ReadWrite); - multibuffer.set_excerpts_for_path( - PathKey::sorted(0), - buffer.clone(), - [ - Point::new(0, 0)..Point::new(1, 4), - Point::new(3, 0)..Point::new(4, 4), - ], - 0, - cx, - ); - multibuffer - }); - - let editor = cx.add_window(|window, cx| { - let mut editor = build_editor(multibuffer.clone(), window, cx); - let snapshot = editor.snapshot(window, cx); - editor.change_selections(SelectionEffects::no_scroll(), window, cx, |s| { - s.select_ranges([Point::new(1, 3)..Point::new(1, 3)]) - }); - editor.begin_selection( - Point::new(2, 1).to_display_point(&snapshot), - true, - 1, - window, - cx, - ); - assert_eq!( - editor.selections.ranges(&editor.display_snapshot(cx)), - [ - Point::new(1, 3)..Point::new(1, 3), - Point::new(2, 1)..Point::new(2, 1), - ] - ); - editor - }); - - // Refreshing selections is a no-op when excerpts haven't changed. 
- _ = editor.update(cx, |editor, window, cx| { - editor.change_selections(SelectionEffects::no_scroll(), window, cx, |s| s.refresh()); - assert_eq!( - editor.selections.ranges(&editor.display_snapshot(cx)), - [ - Point::new(1, 3)..Point::new(1, 3), - Point::new(2, 1)..Point::new(2, 1), - ] - ); - }); - - multibuffer.update(cx, |multibuffer, cx| { - multibuffer.set_excerpts_for_path( - PathKey::sorted(0), - buffer.clone(), - [Point::new(3, 0)..Point::new(4, 4)], - 0, - cx, - ); - }); - _ = editor.update(cx, |editor, window, cx| { - // Removing an excerpt causes the first selection to become degenerate. - assert_eq!( - editor.selections.ranges(&editor.display_snapshot(cx)), - [ - Point::new(0, 0)..Point::new(0, 0), - Point::new(0, 1)..Point::new(0, 1) - ] - ); - - // Refreshing selections will relocate the first selection to the original buffer - // location. - editor.change_selections(SelectionEffects::no_scroll(), window, cx, |s| s.refresh()); - assert_eq!( - editor.selections.ranges(&editor.display_snapshot(cx)), - [ - Point::new(0, 0)..Point::new(0, 0), - Point::new(0, 1)..Point::new(0, 1), - ] - ); - assert!(editor.selections.pending_anchor().is_some()); - }); -} - -#[gpui::test] -fn test_refresh_selections_while_selecting_with_mouse(cx: &mut TestAppContext) { - init_test(cx, |_| {}); - - let buffer = cx.new(|cx| Buffer::local(sample_text(5, 4, 'a'), cx)); - let multibuffer = cx.new(|cx| { - let mut multibuffer = MultiBuffer::new(ReadWrite); - multibuffer.set_excerpts_for_path( - PathKey::sorted(0), - buffer.clone(), - [ - Point::new(0, 0)..Point::new(1, 4), - Point::new(3, 0)..Point::new(4, 4), - ], - 0, - cx, - ); - assert_eq!(multibuffer.read(cx).text(), "aaaa\nbbbb\ndddd\neeee"); - multibuffer - }); - - let editor = cx.add_window(|window, cx| { - let mut editor = build_editor(multibuffer.clone(), window, cx); - let snapshot = editor.snapshot(window, cx); - editor.begin_selection( - Point::new(1, 3).to_display_point(&snapshot), - false, - 1, - window, - cx, - 
); - assert_eq!( - editor.selections.ranges(&editor.display_snapshot(cx)), - [Point::new(1, 3)..Point::new(1, 3)] - ); - editor - }); - - multibuffer.update(cx, |multibuffer, cx| { - multibuffer.set_excerpts_for_path( - PathKey::sorted(0), - buffer.clone(), - [Point::new(3, 0)..Point::new(4, 4)], - 0, - cx, - ); - }); - _ = editor.update(cx, |editor, window, cx| { - assert_eq!( - editor.selections.ranges(&editor.display_snapshot(cx)), - [Point::new(0, 0)..Point::new(0, 0)] - ); - - // Ensure we don't panic when selections are refreshed and that the pending selection is finalized. - editor.change_selections(SelectionEffects::no_scroll(), window, cx, |s| s.refresh()); - assert_eq!( - editor.selections.ranges(&editor.display_snapshot(cx)), - [Point::new(0, 0)..Point::new(0, 0)] - ); - assert!(editor.selections.pending_anchor().is_some()); - }); -} - #[gpui::test] async fn test_extra_newline_insertion(cx: &mut TestAppContext) { init_test(cx, |_| {}); @@ -19263,7 +19112,7 @@ async fn test_copy_highlight_json(cx: &mut TestAppContext) { let x = 1;ˇ } "}); - setup_rust_syntax_highlighting(&mut cx); + setup_syntax_highlighting(rust_lang(), &mut cx); cx.update_editor(|editor, window, cx| { editor.copy_highlight_json(&CopyHighlightJson, window, cx); @@ -19311,7 +19160,7 @@ async fn test_copy_highlight_json_selected_range(cx: &mut TestAppContext) { let yˇ» = 2; } "}); - setup_rust_syntax_highlighting(&mut cx); + setup_syntax_highlighting(rust_lang(), &mut cx); cx.update_editor(|editor, window, cx| { editor.copy_highlight_json(&CopyHighlightJson, window, cx); @@ -19354,7 +19203,7 @@ async fn test_copy_highlight_json_selected_line_range(cx: &mut TestAppContext) { let yˇ» = 2; } "}); - setup_rust_syntax_highlighting(&mut cx); + setup_syntax_highlighting(rust_lang(), &mut cx); cx.update_editor(|editor, window, cx| { editor.selections.set_line_mode(true); @@ -19404,7 +19253,7 @@ async fn test_copy_highlight_json_single_line(cx: &mut TestAppContext) { let y = 2; } "}); - 
setup_rust_syntax_highlighting(&mut cx); + setup_syntax_highlighting(rust_lang(), &mut cx); cx.update_editor(|editor, window, cx| { editor.selections.set_line_mode(true); @@ -19431,34 +19280,6 @@ async fn test_copy_highlight_json_single_line(cx: &mut TestAppContext) { ); } -fn setup_rust_syntax_highlighting(cx: &mut EditorTestContext) { - let syntax = SyntaxTheme::new_test(vec![ - ("keyword", Hsla::red()), - ("function", Hsla::blue()), - ("variable", Hsla::green()), - ("number", Hsla::default()), - ("operator", Hsla::default()), - ("punctuation.bracket", Hsla::default()), - ("punctuation.delimiter", Hsla::default()), - ]); - - let language = rust_lang(); - language.set_theme(&syntax); - - cx.update_buffer(|buffer, cx| buffer.set_language(Some(language), cx)); - cx.executor().run_until_parked(); - cx.update_editor(|editor, window, cx| { - editor.set_style( - EditorStyle { - syntax: Arc::new(syntax), - ..Default::default() - }, - window, - cx, - ); - }); -} - #[gpui::test] async fn test_following(cx: &mut TestAppContext) { init_test(cx, |_| {}); @@ -19738,8 +19559,8 @@ async fn test_following_with_multiple_excerpts(cx: &mut TestAppContext) { let (buffer_1, buffer_2) = project.update(cx, |project, cx| { ( - project.create_local_buffer("abc\ndef\nghi\njkl\n", None, false, cx), - project.create_local_buffer("mno\npqr\nstu\nvwx\n", None, false, cx), + project.create_local_buffer("abc\ndef\nghi\njkl\nmno\npqr\nstu\nvwx\nyza\nbcd\nefg\nhij\nklm\nnop\nqrs\ntuv\nwxy\nzab\ncde\nfgh\n", None, false, cx), + project.create_local_buffer("aaa\nbbb\nccc\nddd\neee\nfff\nggg\nhhh\niii\njjj\nkkk\nlll\nmmm\nnnn\nooo\nppp\nqqq\nrrr\nsss\nttt\n", None, false, cx), ) }); @@ -19814,7 +19635,7 @@ async fn test_following_with_multiple_excerpts(cx: &mut TestAppContext) { // Remove some excerpts. 
leader.update(cx, |leader, cx| { leader.buffer.update(cx, |multibuffer, cx| { - multibuffer.remove_excerpts_for_path( + multibuffer.remove_excerpts( PathKey::with_sort_prefix(1, rel_path("b.txt").into_arc()), cx, ); @@ -23318,7 +23139,7 @@ async fn test_toggle_diff_expand_in_multi_buffer(cx: &mut TestAppContext) { 0, cx, ); - assert_eq!(multibuffer.excerpt_ids().len(), 9); + assert_eq!(multibuffer.read(cx).excerpts().count(), 9); multibuffer }); @@ -23422,7 +23243,7 @@ async fn test_expand_diff_hunk_at_excerpt_boundary(cx: &mut TestAppContext) { 0, cx, ); - assert_eq!(multibuffer.excerpt_ids().len(), 3); + assert_eq!(multibuffer.read(cx).excerpts().count(), 3); multibuffer }); @@ -24191,9 +24012,13 @@ async fn setup_indent_guides_editor( let buffer_id = cx.update_editor(|editor, window, cx| { editor.set_text(text, window, cx); - let buffer_ids = editor.buffer().read(cx).excerpt_buffer_ids(); - - buffer_ids[0] + editor + .buffer() + .read(cx) + .as_singleton() + .unwrap() + .read(cx) + .remote_id() }); (buffer_id, cx) @@ -24902,7 +24727,7 @@ async fn test_indent_guide_with_expanded_diff_hunks(cx: &mut TestAppContext) { editor .snapshot(window, cx) .buffer_snapshot() - .indent_guides_in_range(Anchor::min()..Anchor::max(), false, cx) + .indent_guides_in_range(Anchor::Min..Anchor::Max, false, cx) .map(|guide| (guide.start_row..=guide.end_row, guide.depth)) .collect::>() }); @@ -24957,12 +24782,19 @@ async fn test_adjacent_diff_hunks(executor: BackgroundExecutor, cx: &mut TestApp let hunk_ranges = cx.update_editor(|editor, window, cx| { let snapshot = editor.snapshot(window, cx); let hunks = editor - .diff_hunks_in_ranges(&[Anchor::min()..Anchor::max()], &snapshot.buffer_snapshot()) + .diff_hunks_in_ranges(&[Anchor::Min..Anchor::Max], &snapshot.buffer_snapshot()) .collect::>(); - let excerpt_id = editor.buffer.read(cx).excerpt_ids()[0]; + let multibuffer_snapshot = editor.buffer.read(cx).snapshot(cx); hunks .into_iter() - .map(|hunk| Anchor::range_in_buffer(excerpt_id, 
hunk.buffer_range)) + .map(|hunk| { + multibuffer_snapshot + .anchor_in_excerpt(hunk.buffer_range.start) + .unwrap() + ..multibuffer_snapshot + .anchor_in_excerpt(hunk.buffer_range.end) + .unwrap() + }) .collect::>() }); assert_eq!(hunk_ranges.len(), 2); @@ -25047,12 +24879,19 @@ async fn test_adjacent_diff_hunks(executor: BackgroundExecutor, cx: &mut TestApp let hunk_ranges = cx.update_editor(|editor, window, cx| { let snapshot = editor.snapshot(window, cx); let hunks = editor - .diff_hunks_in_ranges(&[Anchor::min()..Anchor::max()], &snapshot.buffer_snapshot()) + .diff_hunks_in_ranges(&[Anchor::Min..Anchor::Max], &snapshot.buffer_snapshot()) .collect::>(); - let excerpt_id = editor.buffer.read(cx).excerpt_ids()[0]; + let multibuffer_snapshot = snapshot.buffer_snapshot(); hunks .into_iter() - .map(|hunk| Anchor::range_in_buffer(excerpt_id, hunk.buffer_range)) + .map(|hunk| { + multibuffer_snapshot + .anchor_in_excerpt(hunk.buffer_range.start) + .unwrap() + ..multibuffer_snapshot + .anchor_in_excerpt(hunk.buffer_range.end) + .unwrap() + }) .collect::>() }); assert_eq!(hunk_ranges.len(), 2); @@ -25112,12 +24951,19 @@ async fn test_toggle_deletion_hunk_at_start_of_file( let hunk_ranges = cx.update_editor(|editor, window, cx| { let snapshot = editor.snapshot(window, cx); let hunks = editor - .diff_hunks_in_ranges(&[Anchor::min()..Anchor::max()], &snapshot.buffer_snapshot()) + .diff_hunks_in_ranges(&[Anchor::Min..Anchor::Max], &snapshot.buffer_snapshot()) .collect::>(); - let excerpt_id = editor.buffer.read(cx).excerpt_ids()[0]; + let multibuffer_snapshot = editor.buffer.read(cx).snapshot(cx); hunks .into_iter() - .map(|hunk| Anchor::range_in_buffer(excerpt_id, hunk.buffer_range)) + .map(|hunk| { + multibuffer_snapshot + .anchor_in_excerpt(hunk.buffer_range.start) + .unwrap() + ..multibuffer_snapshot + .anchor_in_excerpt(hunk.buffer_range.end) + .unwrap() + }) .collect::>() }); assert_eq!(hunk_ranges.len(), 1); @@ -25217,12 +25063,17 @@ async fn 
test_expand_first_line_diff_hunk_keeps_deleted_lines_visible( // Expanding a diff hunk at the first line inserts deleted lines above the first buffer line. cx.update_editor(|editor, window, cx| { let snapshot = editor.snapshot(window, cx); - let excerpt_id = editor.buffer.read(cx).excerpt_ids()[0]; + let multibuffer_snapshot = editor.buffer.read(cx).snapshot(cx); let hunks = editor - .diff_hunks_in_ranges(&[Anchor::min()..Anchor::max()], &snapshot.buffer_snapshot()) + .diff_hunks_in_ranges(&[Anchor::Min..Anchor::Max], &snapshot.buffer_snapshot()) .collect::>(); assert_eq!(hunks.len(), 1); - let hunk_range = Anchor::range_in_buffer(excerpt_id, hunks[0].buffer_range.clone()); + let hunk_range = multibuffer_snapshot + .anchor_in_excerpt(hunks[0].buffer_range.start) + .unwrap() + ..multibuffer_snapshot + .anchor_in_excerpt(hunks[0].buffer_range.end) + .unwrap(); editor.toggle_single_diff_hunk(hunk_range, cx) }); executor.run_until_parked(); @@ -25279,7 +25130,7 @@ async fn test_display_diff_hunks(cx: &mut TestAppContext) { multibuffer.set_excerpts_for_path( PathKey::with_sort_prefix(0, buffer.read(cx).file().unwrap().path().clone()), buffer.clone(), - vec![text::Anchor::MIN.to_point(&snapshot)..text::Anchor::MAX.to_point(&snapshot)], + vec![Point::zero()..snapshot.max_point()], 2, cx, ); @@ -25365,7 +25216,7 @@ async fn test_partially_staged_hunk(cx: &mut TestAppContext) { cx.update_editor(|editor, window, cx| { let snapshot = editor.snapshot(window, cx); let hunks = editor - .diff_hunks_in_ranges(&[Anchor::min()..Anchor::max()], &snapshot.buffer_snapshot()) + .diff_hunks_in_ranges(&[Anchor::Min..Anchor::Max], &snapshot.buffer_snapshot()) .collect::>(); assert_eq!(hunks.len(), 1); assert_eq!( @@ -26450,7 +26301,7 @@ async fn test_folded_buffers_cleared_on_excerpts_removed(cx: &mut TestAppContext // `multi_buffer::Event::ExcerptsRemoved` event is emitted, which should be // picked up by the editor and update the display map accordingly. 
multi_buffer.update(cx, |multi_buffer, cx| { - multi_buffer.remove_excerpts_for_path(PathKey::sorted(0), cx) + multi_buffer.remove_excerpts(PathKey::sorted(0), cx) }); assert!(!editor.update(cx, |editor, cx| editor.has_any_buffer_folded(cx))); } @@ -26702,7 +26553,12 @@ async fn test_multi_buffer_navigation_with_folded_buffers(cx: &mut TestAppContex ); let mut editor = Editor::new(EditorMode::full(), multi_buffer.clone(), None, window, cx); - let buffer_ids = multi_buffer.read(cx).excerpt_buffer_ids(); + let buffer_ids = multi_buffer + .read(cx) + .snapshot(cx) + .excerpts() + .map(|excerpt| excerpt.context.start.buffer_id) + .collect::>(); // fold all but the second buffer, so that we test navigating between two // adjacent folded buffers, as well as folded buffers at the start and // end the multibuffer @@ -27038,7 +26894,12 @@ async fn assert_highlighted_edits( let text_anchor_edits = edits .clone() .into_iter() - .map(|(range, edit)| (range.start.text_anchor..range.end.text_anchor, edit.into())) + .map(|(range, edit)| { + ( + range.start.expect_text_anchor()..range.end.expect_text_anchor(), + edit.into(), + ) + }) .collect::>(); let edit_preview = window @@ -27055,10 +26916,11 @@ async fn assert_highlighted_edits( cx.update(|_window, cx| { let highlighted_edits = edit_prediction_edit_text( - snapshot.as_singleton().unwrap().2, + snapshot.as_singleton().unwrap(), &edits, &edit_preview, include_deletions, + &snapshot, cx, ); assertion_fn(highlighted_edits, cx) @@ -31479,12 +31341,8 @@ async fn test_paste_url_from_other_app_creates_markdown_link_selectively_in_mult Point::new(1, 21)..Point::new(1, 25), ]) }); - let first_buffer_id = multi_buffer - .read(cx) - .excerpt_buffer_ids() - .into_iter() - .next() - .unwrap(); + let snapshot = multi_buffer.read(cx).snapshot(cx); + let first_buffer_id = snapshot.all_buffer_ids().next().unwrap(); let first_buffer = multi_buffer.read(cx).buffer(first_buffer_id).unwrap(); first_buffer.update(cx, |buffer, cx| { 
buffer.set_language(Some(markdown_language.clone()), cx); @@ -32530,7 +32388,12 @@ async fn test_multibuffer_selections_with_folding(cx: &mut TestAppContext) { }); let mut cx = EditorTestContext::for_editor_in(editor.clone(), cx).await; - let buffer_ids = cx.multibuffer(|mb, _| mb.excerpt_buffer_ids()); + let buffer_ids = cx.multibuffer(|mb, cx| { + mb.snapshot(cx) + .excerpts() + .map(|excerpt| excerpt.context.start.buffer_id) + .collect::>() + }); cx.assert_excerpts_with_selections(indoc! {" [EXCERPT] @@ -33770,7 +33633,7 @@ async fn test_diff_review_button_shown_when_ai_enabled(cx: &mut TestAppContext) } /// Helper function to create a DiffHunkKey for testing. -/// Uses Anchor::min() as a placeholder anchor since these tests don't need +/// Uses Anchor::Min as a placeholder anchor since these tests don't need /// real buffer positioning. fn test_hunk_key(file_path: &str) -> DiffHunkKey { DiffHunkKey { @@ -33779,7 +33642,7 @@ fn test_hunk_key(file_path: &str) -> DiffHunkKey { } else { Arc::from(util::rel_path::RelPath::unix(file_path).unwrap()) }, - hunk_start_anchor: Anchor::min(), + hunk_start_anchor: Anchor::Min, } } @@ -33802,7 +33665,7 @@ fn add_test_comment( comment: &str, cx: &mut Context, ) -> usize { - editor.add_review_comment(key, comment.to_string(), Anchor::min()..Anchor::max(), cx) + editor.add_review_comment(key, comment.to_string(), Anchor::Min..Anchor::Max, cx) } #[gpui::test] @@ -35838,3 +35701,75 @@ async fn test_align_selections_multicolumn(cx: &mut TestAppContext) { cx.update_editor(|e, window, cx| e.align_selections(&AlignSelections, window, cx)); cx.assert_editor_state(after); } + +#[gpui::test] +async fn test_custom_fallback_highlights(cx: &mut TestAppContext) { + init_test(cx, |_| {}); + + let mut cx = EditorTestContext::new(cx).await; + cx.set_state(indoc! 
{"fn main(self, variable: TType) {ˇ}"}); + + let variable_color = Hsla::green(); + let function_color = Hsla::blue(); + + let test_cases = [ + ("@variable", Some(variable_color)), + ("@type", None), + ("@type @variable", Some(variable_color)), + ("@variable @type", Some(variable_color)), + ("@variable @function", Some(function_color)), + ("@function @variable", Some(variable_color)), + ]; + + for (test_case, expected) in test_cases { + let custom_rust_lang = Arc::into_inner(rust_lang()) + .unwrap() + .with_highlights_query(format! {r#"(type_identifier) {test_case}"#}.as_str()) + .unwrap(); + let theme = setup_syntax_highlighting(Arc::new(custom_rust_lang), &mut cx); + let expected = expected.map_or_else(Vec::new, |expected_color| { + vec![(24..29, HighlightStyle::color(expected_color))] + }); + + cx.update_editor(|editor, window, cx| { + let snapshot = editor.snapshot(window, cx); + assert_eq!( + expected, + snapshot.combined_highlights(MultiBufferOffset(0)..snapshot.buffer().len(), &theme), + "Test case with '{test_case}' highlights query did not pass", + ); + }); + } +} + +fn setup_syntax_highlighting( + language: Arc, + cx: &mut EditorTestContext, +) -> Arc { + let syntax = Arc::new(SyntaxTheme::new_test(vec![ + ("keyword", Hsla::red()), + ("function", Hsla::blue()), + ("variable", Hsla::green()), + ("number", Hsla::default()), + ("operator", Hsla::default()), + ("punctuation.bracket", Hsla::default()), + ("punctuation.delimiter", Hsla::default()), + ])); + + language.set_theme(&syntax); + + cx.update_buffer(|buffer, cx| buffer.set_language(Some(language), cx)); + cx.executor().run_until_parked(); + cx.update_editor(|editor, window, cx| { + editor.set_style( + EditorStyle { + syntax: syntax.clone(), + ..EditorStyle::default() + }, + window, + cx, + ); + }); + + syntax +} diff --git a/crates/editor/src/element.rs b/crates/editor/src/element.rs index 2fdb2686ee00ea2fc27881b0c18a54fa85466d9a..7a532dc7a75ea3583456be6611ef072cd7692bc7 100644 --- 
a/crates/editor/src/element.rs +++ b/crates/editor/src/element.rs @@ -54,7 +54,7 @@ use itertools::Itertools; use language::{HighlightedText, IndentGuideSettings, language_settings::ShowWhitespaceSetting}; use markdown::Markdown; use multi_buffer::{ - Anchor, ExcerptId, ExcerptInfo, ExpandExcerptDirection, ExpandInfo, MultiBufferPoint, + Anchor, ExcerptBoundaryInfo, ExpandExcerptDirection, ExpandInfo, MultiBufferPoint, MultiBufferRow, RowInfo, }; @@ -1390,13 +1390,13 @@ impl EditorElement { .snapshot .display_point_to_anchor(valid_point, Bias::Left); - if let Some((buffer_snapshot, file)) = position_map + if let Some((buffer_anchor, buffer_snapshot)) = position_map .snapshot .buffer_snapshot() - .buffer_for_excerpt(buffer_anchor.excerpt_id) - .and_then(|buffer| buffer.file().map(|file| (buffer, file))) + .anchor_to_buffer_anchor(buffer_anchor) + && let Some(file) = buffer_snapshot.file() { - let as_point = text::ToPoint::to_point(&buffer_anchor.text_anchor, buffer_snapshot); + let as_point = text::ToPoint::to_point(&buffer_anchor, buffer_snapshot); let is_visible = editor .gutter_breakpoint_indicator @@ -1752,7 +1752,7 @@ impl EditorElement { // Remote cursors if let Some(collaboration_hub) = &editor.collaboration_hub { for remote_selection in snapshot.remote_selections_in_range( - &(Anchor::min()..Anchor::max()), + &(Anchor::Min..Anchor::Max), collaboration_hub.deref(), cx, ) { @@ -2589,12 +2589,6 @@ impl EditorElement { const INLINE_SLOT_CHAR_LIMIT: u32 = 4; const MAX_ALTERNATE_DISTANCE: u32 = 8; - let excerpt_id = snapshot - .display_snapshot - .buffer_snapshot() - .excerpt_containing(buffer_point..buffer_point) - .map(|excerpt| excerpt.id()); - let is_valid_row = |row_candidate: u32| -> bool { // move to other row if folded row if snapshot.is_line_folded(MultiBufferRow(row_candidate)) { @@ -2610,13 +2604,18 @@ impl EditorElement { row: row_candidate, column: 0, }; - let candidate_excerpt_id = snapshot + // move to other row if different excerpt + let range = if 
candidate_point < buffer_point { + candidate_point..buffer_point + } else { + buffer_point..candidate_point + }; + if snapshot .display_snapshot .buffer_snapshot() - .excerpt_containing(candidate_point..candidate_point) - .map(|excerpt| excerpt.id()); - // move to other row if different excerpt - if excerpt_id != candidate_excerpt_id { + .excerpt_containing(range) + .is_none() + { return false; } } @@ -2796,7 +2795,7 @@ impl EditorElement { .newest::(&editor_snapshot.display_snapshot) .head(); - let Some((buffer, buffer_point, _)) = editor_snapshot + let Some((buffer, buffer_point)) = editor_snapshot .buffer_snapshot() .point_to_buffer_point(cursor_point) else { @@ -3389,8 +3388,8 @@ impl EditorElement { .enumerate() .map(|(ix, row_info)| { let ExpandInfo { - excerpt_id, direction, + start_anchor, } = row_info.expand_info?; let icon_name = match direction { @@ -3419,7 +3418,7 @@ impl EditorElement { .width(width) .on_click(move |_, window, cx| { editor.update(cx, |editor, cx| { - editor.expand_excerpt(excerpt_id, direction, window, cx); + editor.expand_excerpt(start_anchor, direction, window, cx); }); }) .tooltip(Tooltip::for_action_title( @@ -3886,7 +3885,7 @@ impl EditorElement { selected_buffer_ids: &Vec, latest_selection_anchors: &HashMap, is_row_soft_wrapped: impl Copy + Fn(usize) -> bool, - sticky_header_excerpt_id: Option, + sticky_header_excerpt_id: Option, indent_guides: &Option>, block_resize_offset: &mut i32, window: &mut Window, @@ -3974,7 +3973,7 @@ impl EditorElement { let mut result = v_flex().id(block_id).w_full().pr(editor_margins.right); if self.should_show_buffer_headers() { - let selected = selected_buffer_ids.contains(&first_excerpt.buffer_id); + let selected = selected_buffer_ids.contains(&first_excerpt.buffer_id()); let jump_data = header_jump_data( snapshot, block_row_start, @@ -4029,8 +4028,8 @@ impl EditorElement { latest_selection_anchors, ); - if sticky_header_excerpt_id != Some(excerpt.id) { - let selected = 
selected_buffer_ids.contains(&excerpt.buffer_id); + if sticky_header_excerpt_id != Some(excerpt.buffer_id()) { + let selected = selected_buffer_ids.contains(&excerpt.buffer_id()); result = result.child(div().pr(editor_margins.right).child( self.render_buffer_header( @@ -4190,7 +4189,7 @@ impl EditorElement { fn render_buffer_header( &self, - for_excerpt: &ExcerptInfo, + for_excerpt: &ExcerptBoundaryInfo, is_folded: bool, is_selected: bool, is_sticky: bool, @@ -4227,7 +4226,7 @@ impl EditorElement { selected_buffer_ids: &Vec, latest_selection_anchors: &HashMap, is_row_soft_wrapped: impl Copy + Fn(usize) -> bool, - sticky_header_excerpt_id: Option, + sticky_header_excerpt_id: Option, indent_guides: &Option>, window: &mut Window, cx: &mut App, @@ -4520,7 +4519,7 @@ impl EditorElement { let editor_bg_color = cx.theme().colors().editor_background; - let selected = selected_buffer_ids.contains(&excerpt.buffer_id); + let selected = selected_buffer_ids.contains(&excerpt.buffer_id()); let available_width = hitbox.bounds.size.width - right_margin; @@ -7894,23 +7893,26 @@ impl EditorElement { return; } let buffer_snapshot = &display_snapshot.buffer_snapshot(); - for (buffer, buffer_range, excerpt_id) in - buffer_snapshot.range_to_buffer_ranges(anchor_range.start..=anchor_range.end) + for (excerpt_buffer_snapshot, buffer_range, _) in + buffer_snapshot.range_to_buffer_ranges(anchor_range.start..anchor_range.end) { - let buffer_range = - buffer.anchor_after(buffer_range.start)..buffer.anchor_before(buffer_range.end); + let buffer_range = excerpt_buffer_snapshot.anchor_after(buffer_range.start) + ..excerpt_buffer_snapshot.anchor_before(buffer_range.end); selections.extend(debug_ranges.ranges.iter().flat_map(|debug_range| { - let player_color = theme - .players() - .color_for_participant(debug_range.occurrence_index as u32 + 1); - debug_range.ranges.iter().filter_map(move |range| { - if range.start.buffer_id != Some(buffer.remote_id()) { + 
debug_range.ranges.iter().filter_map(|range| { + let player_color = theme + .players() + .color_for_participant(debug_range.occurrence_index as u32 + 1); + if range.start.buffer_id != excerpt_buffer_snapshot.remote_id() { return None; } - let clipped_start = range.start.max(&buffer_range.start, buffer); - let clipped_end = range.end.min(&buffer_range.end, buffer); + let clipped_start = range + .start + .max(&buffer_range.start, &excerpt_buffer_snapshot); + let clipped_end = + range.end.min(&buffer_range.end, &excerpt_buffer_snapshot); let range = buffer_snapshot - .anchor_range_in_excerpt(excerpt_id, *clipped_start..*clipped_end)?; + .buffer_anchor_range_to_anchor_range(*clipped_start..*clipped_end)?; let start = range.start.to_display_point(display_snapshot); let end = range.end.to_display_point(display_snapshot); let selection_layout = SelectionLayout { @@ -8150,49 +8152,23 @@ pub(crate) fn header_jump_data( editor_snapshot: &EditorSnapshot, block_row_start: DisplayRow, height: u32, - first_excerpt: &ExcerptInfo, + first_excerpt: &ExcerptBoundaryInfo, latest_selection_anchors: &HashMap, ) -> JumpData { - let jump_target = if let Some(anchor) = latest_selection_anchors.get(&first_excerpt.buffer_id) - && let Some(range) = editor_snapshot.context_range_for_excerpt(anchor.excerpt_id) - && let Some(buffer) = editor_snapshot - .buffer_snapshot() - .buffer_for_excerpt(anchor.excerpt_id) + let multibuffer_snapshot = editor_snapshot.buffer_snapshot(); + let buffer = first_excerpt.buffer(multibuffer_snapshot); + let (jump_anchor, jump_buffer) = if let Some(anchor) = + latest_selection_anchors.get(&first_excerpt.buffer_id()) + && let Some((jump_anchor, selection_buffer)) = + multibuffer_snapshot.anchor_to_buffer_anchor(*anchor) { - JumpTargetInExcerptInput { - id: anchor.excerpt_id, - buffer, - excerpt_start_anchor: range.start, - jump_anchor: anchor.text_anchor, - } + (jump_anchor, selection_buffer) } else { - JumpTargetInExcerptInput { - id: first_excerpt.id, - buffer: 
&first_excerpt.buffer, - excerpt_start_anchor: first_excerpt.range.context.start, - jump_anchor: first_excerpt.range.primary.start, - } + (first_excerpt.range.primary.start, buffer) }; - header_jump_data_inner(editor_snapshot, block_row_start, height, &jump_target) -} - -struct JumpTargetInExcerptInput<'a> { - id: ExcerptId, - buffer: &'a language::BufferSnapshot, - excerpt_start_anchor: text::Anchor, - jump_anchor: text::Anchor, -} - -fn header_jump_data_inner( - snapshot: &EditorSnapshot, - block_row_start: DisplayRow, - height: u32, - for_excerpt: &JumpTargetInExcerptInput, -) -> JumpData { - let buffer = &for_excerpt.buffer; - let jump_position = language::ToPoint::to_point(&for_excerpt.jump_anchor, buffer); - let excerpt_start = for_excerpt.excerpt_start_anchor; - let rows_from_excerpt_start = if for_excerpt.jump_anchor == excerpt_start { + let excerpt_start = first_excerpt.range.context.start; + let jump_position = language::ToPoint::to_point(&jump_anchor, jump_buffer); + let rows_from_excerpt_start = if jump_anchor == excerpt_start { 0 } else { let excerpt_start_point = language::ToPoint::to_point(&excerpt_start, buffer); @@ -8201,15 +8177,14 @@ fn header_jump_data_inner( let line_offset_from_top = (block_row_start.0 + height + rows_from_excerpt_start) .saturating_sub( - snapshot + editor_snapshot .scroll_anchor - .scroll_position(&snapshot.display_snapshot) + .scroll_position(&editor_snapshot.display_snapshot) .y as u32, ); JumpData::MultiBufferPoint { - excerpt_id: for_excerpt.id, - anchor: for_excerpt.jump_anchor, + anchor: jump_anchor, position: jump_position, line_offset_from_top, } @@ -8217,7 +8192,7 @@ fn header_jump_data_inner( pub(crate) fn render_buffer_header( editor: &Entity, - for_excerpt: &ExcerptInfo, + for_excerpt: &ExcerptBoundaryInfo, is_folded: bool, is_selected: bool, is_sticky: bool, @@ -8229,6 +8204,8 @@ pub(crate) fn render_buffer_header( let multi_buffer = editor_read.buffer.read(cx); let is_read_only = editor_read.read_only(cx); let 
editor_handle: &dyn ItemHandle = editor; + let multibuffer_snapshot = multi_buffer.snapshot(cx); + let buffer = for_excerpt.buffer(&multibuffer_snapshot); let breadcrumbs = if is_selected { editor_read.breadcrumbs_inner(cx) @@ -8236,31 +8213,30 @@ pub(crate) fn render_buffer_header( None }; + let buffer_id = for_excerpt.buffer_id(); let file_status = multi_buffer .all_diff_hunks_expanded() - .then(|| editor_read.status_for_buffer_id(for_excerpt.buffer_id, cx)) + .then(|| editor_read.status_for_buffer_id(buffer_id, cx)) .flatten(); - let indicator = multi_buffer - .buffer(for_excerpt.buffer_id) - .and_then(|buffer| { - let buffer = buffer.read(cx); - let indicator_color = match (buffer.has_conflict(), buffer.is_dirty()) { - (true, _) => Some(Color::Warning), - (_, true) => Some(Color::Accent), - (false, false) => None, - }; - indicator_color.map(|indicator_color| Indicator::dot().color(indicator_color)) - }); + let indicator = multi_buffer.buffer(buffer_id).and_then(|buffer| { + let buffer = buffer.read(cx); + let indicator_color = match (buffer.has_conflict(), buffer.is_dirty()) { + (true, _) => Some(Color::Warning), + (_, true) => Some(Color::Accent), + (false, false) => None, + }; + indicator_color.map(|indicator_color| Indicator::dot().color(indicator_color)) + }); let include_root = editor_read .project .as_ref() .map(|project| project.read(cx).visible_worktrees(cx).count() > 1) .unwrap_or_default(); - let file = for_excerpt.buffer.file(); + let file = buffer.file(); let can_open_excerpts = file.is_none_or(|file| file.can_open()); let path_style = file.map(|file| file.path_style(cx)); - let relative_path = for_excerpt.buffer.resolve_file_path(include_root, cx); + let relative_path = buffer.resolve_file_path(include_root, cx); let (parent_path, filename) = if let Some(path) = &relative_path { if let Some(path_style) = path_style { let (dir, file_name) = path_style.split(path); @@ -8275,7 +8251,7 @@ pub(crate) fn render_buffer_header( let colors = 
cx.theme().colors(); let header = div() - .id(("buffer-header", for_excerpt.buffer_id.to_proto())) + .id(("buffer-header", buffer_id.to_proto())) .p(BUFFER_HEADER_PADDING) .w_full() .h(FILE_HEADER_HEIGHT as f32 * window.line_height()) @@ -8303,7 +8279,7 @@ pub(crate) fn render_buffer_header( .hover(|style| style.bg(colors.element_hover)) .map(|header| { let editor = editor.clone(); - let buffer_id = for_excerpt.buffer_id; + let buffer_id = for_excerpt.buffer_id(); let toggle_chevron_icon = FileIcons::get_chevron_icon(!is_folded, cx).map(Icon::from_path); let button_size = rems_from_px(28.); @@ -8367,7 +8343,7 @@ pub(crate) fn render_buffer_header( .addons .values() .filter_map(|addon| { - addon.render_buffer_header_controls(for_excerpt, window, cx) + addon.render_buffer_header_controls(for_excerpt, buffer, window, cx) }) .take(1), ) @@ -8460,7 +8436,7 @@ pub(crate) fn render_buffer_header( ), ) }) - .when(!for_excerpt.buffer.capability.editable(), |el| { + .when(!buffer.capability.editable(), |el| { el.child(Icon::new(IconName::FileLock).color(Color::Muted)) }) .when_some(breadcrumbs, |then, breadcrumbs| { @@ -8511,7 +8487,7 @@ pub(crate) fn render_buffer_header( }) .on_mouse_down(MouseButton::Left, |_, _, cx| cx.stop_propagation()) .on_click(window.listener_for(editor, { - let buffer_id = for_excerpt.buffer_id; + let buffer_id = for_excerpt.buffer_id(); move |editor, e: &ClickEvent, window, cx| { if e.modifiers().alt { editor.open_excerpts_common( @@ -8533,7 +8509,7 @@ pub(crate) fn render_buffer_header( ), ); - let file = for_excerpt.buffer.file().cloned(); + let file = buffer.file().cloned(); let editor = editor.clone(); right_click_menu("buffer-header-context-menu") @@ -9855,14 +9831,14 @@ impl Element for EditorElement { }; let start_anchor = if start_row == Default::default() { - Anchor::min() + Anchor::Min } else { snapshot.buffer_snapshot().anchor_before( DisplayPoint::new(start_row, 0).to_offset(&snapshot, Bias::Left), ) }; let end_anchor = if end_row > 
max_row { - Anchor::max() + Anchor::Max } else { snapshot.buffer_snapshot().anchor_before( DisplayPoint::new(end_row, 0).to_offset(&snapshot, Bias::Right), @@ -9888,7 +9864,7 @@ impl Element for EditorElement { editor.update(cx, |editor, cx| { let snapshot = editor.snapshot(window, cx); let start_anchor = if start_row == Default::default() { - Anchor::min() + Anchor::Min } else { snapshot.buffer_snapshot().anchor_before( DisplayPoint::new(start_row, 0) @@ -9896,7 +9872,7 @@ impl Element for EditorElement { ) }; let end_anchor = if end_row > max_row { - Anchor::max() + Anchor::Max } else { snapshot.buffer_snapshot().anchor_before( DisplayPoint::new(end_row, 0) @@ -10052,9 +10028,11 @@ impl Element for EditorElement { HashMap::default(); for selection in all_anchor_selections.iter() { let head = selection.head(); - if let Some(buffer_id) = head.text_anchor.buffer_id { + if let Some((text_anchor, _)) = + snapshot.buffer_snapshot().anchor_to_buffer_anchor(head) + { anchors_by_buffer - .entry(buffer_id) + .entry(text_anchor.buffer_id) .and_modify(|(latest_id, latest_anchor)| { if selection.id > *latest_id { *latest_id = selection.id; @@ -10322,8 +10300,9 @@ impl Element for EditorElement { } else { None }; - let sticky_header_excerpt_id = - sticky_header_excerpt.as_ref().map(|top| top.excerpt.id); + let sticky_header_excerpt_id = sticky_header_excerpt + .as_ref() + .map(|top| top.excerpt.buffer_id()); let buffer = snapshot.buffer_snapshot(); let start_buffer_row = MultiBufferRow(start_anchor.to_point(&buffer).row); @@ -12968,7 +12947,7 @@ mod tests { editor.insert_blocks( [BlockProperties { style: BlockStyle::Fixed, - placement: BlockPlacement::Above(Anchor::min()), + placement: BlockPlacement::Above(Anchor::Min), height: Some(3), render: Arc::new(|cx| div().h(3. 
* cx.window.line_height()).into_any()), priority: 0, diff --git a/crates/editor/src/folding_ranges.rs b/crates/editor/src/folding_ranges.rs index de32f481d52e501eea8f7814f4b114fbdbbd0458..c59a3e004a8b4f791af2d44be19878239ece1d42 100644 --- a/crates/editor/src/folding_ranges.rs +++ b/crates/editor/src/folding_ranges.rs @@ -21,9 +21,9 @@ impl Editor { }; let buffers_to_query = self - .visible_excerpts(true, cx) - .into_values() - .map(|(buffer, ..)| buffer) + .visible_buffers(cx) + .into_iter() + .filter(|buffer| self.is_lsp_relevant(buffer.read(cx).file(), cx)) .chain(for_buffer.and_then(|id| self.buffer.read(cx).buffer(id))) .filter(|buffer| { let id = buffer.read(cx).remote_id(); diff --git a/crates/editor/src/git/blame.rs b/crates/editor/src/git/blame.rs index 827d182a0f11508ae301691f832e7ec04a728364..9ba5c4aa19cd66c454bf633a04636cd63bd180b8 100644 --- a/crates/editor/src/git/blame.rs +++ b/crates/editor/src/git/blame.rs @@ -204,8 +204,8 @@ impl GitBlame { git_blame.generate(cx); } } - multi_buffer::Event::ExcerptsAdded { .. } - | multi_buffer::Event::ExcerptsEdited { .. } => git_blame.regenerate_on_edit(cx), + multi_buffer::Event::BufferRangesUpdated { .. } + | multi_buffer::Event::BuffersEdited { .. 
} => git_blame.regenerate_on_edit(cx), _ => {} }, ); @@ -346,11 +346,10 @@ impl GitBlame { let Some(multi_buffer) = self.multi_buffer.upgrade() else { return; }; - multi_buffer - .read(cx) - .excerpt_buffer_ids() - .into_iter() - .for_each(|id| self.sync(cx, id)); + let snapshot = multi_buffer.read(cx).snapshot(cx); + for id in snapshot.all_buffer_ids() { + self.sync(cx, id) + } } fn sync(&mut self, cx: &mut App, buffer_id: BufferId) { @@ -497,10 +496,10 @@ impl GitBlame { } let buffers_to_blame = self .multi_buffer - .update(cx, |multi_buffer, _| { - multi_buffer + .update(cx, |multi_buffer, cx| { + let snapshot = multi_buffer.snapshot(cx); + snapshot .all_buffer_ids() - .into_iter() .filter_map(|id| Some(multi_buffer.buffer(id)?.downgrade())) .collect::>() }) diff --git a/crates/editor/src/hover_links.rs b/crates/editor/src/hover_links.rs index 3a6ff4ec0e4fc53d19bfb51a10b1f7790933b175..e00fd20ed5abdcd49dbe87510bfd8de54b60fce2 100644 --- a/crates/editor/src/hover_links.rs +++ b/crates/editor/src/hover_links.rs @@ -237,7 +237,8 @@ impl Editor { let Some(mb_anchor) = self .buffer() .read(cx) - .buffer_anchor_to_anchor(&buffer, anchor, cx) + .snapshot(cx) + .anchor_in_excerpt(anchor) else { return Task::ready(Ok(Navigated::No)); }; @@ -324,16 +325,13 @@ pub fn show_link_definition( return; } - let trigger_anchor = trigger_point.anchor(); - let anchor = snapshot.buffer_snapshot().anchor_before(*trigger_anchor); - let Some(buffer) = editor.buffer().read(cx).buffer_for_anchor(anchor, cx) else { + let anchor = trigger_point.anchor().bias_left(snapshot.buffer_snapshot()); + let Some((anchor, _)) = snapshot.buffer_snapshot().anchor_to_buffer_anchor(anchor) else { + return; + }; + let Some(buffer) = editor.buffer.read(cx).buffer(anchor.buffer_id) else { return; }; - let Anchor { - excerpt_id, - text_anchor, - .. 
- } = anchor; let same_kind = hovered_link_state.preferred_kind == preferred_kind || hovered_link_state .links @@ -363,39 +361,39 @@ pub fn show_link_definition( async move { let result = match &trigger_point { TriggerPoint::Text(_) => { - if let Some((url_range, url)) = find_url(&buffer, text_anchor, cx.clone()) { + if let Some((url_range, url)) = find_url(&buffer, anchor, cx.clone()) { this.read_with(cx, |_, _| { let range = maybe!({ let range = - snapshot.anchor_range_in_excerpt(excerpt_id, url_range)?; + snapshot.buffer_anchor_range_to_anchor_range(url_range)?; Some(RangeInEditor::Text(range)) }); (range, vec![HoverLink::Url(url)]) }) .ok() } else if let Some((filename_range, filename)) = - find_file(&buffer, project.clone(), text_anchor, cx).await + find_file(&buffer, project.clone(), anchor, cx).await { let range = maybe!({ let range = - snapshot.anchor_range_in_excerpt(excerpt_id, filename_range)?; + snapshot.buffer_anchor_range_to_anchor_range(filename_range)?; Some(RangeInEditor::Text(range)) }); Some((range, vec![HoverLink::File(filename)])) } else if let Some(provider) = provider { let task = cx.update(|_, cx| { - provider.definitions(&buffer, text_anchor, preferred_kind, cx) + provider.definitions(&buffer, anchor, preferred_kind, cx) })?; if let Some(task) = task { task.await.ok().flatten().map(|definition_result| { ( definition_result.iter().find_map(|link| { link.origin.as_ref().and_then(|origin| { - let range = snapshot.anchor_range_in_excerpt( - excerpt_id, - origin.range.clone(), - )?; + let range = snapshot + .buffer_anchor_range_to_anchor_range( + origin.range.clone(), + )?; Some(RangeInEditor::Text(range)) }) }), @@ -1168,7 +1166,7 @@ mod tests { }); cx.simulate_mouse_move(hover_point, None, Modifiers::secondary_key()); cx.background_executor.run_until_parked(); - assert!(requests.try_next().is_err()); + assert!(requests.try_recv().is_err()); cx.assert_editor_text_highlights( HighlightKey::HoveredLinkState, indoc! 
{" @@ -1602,7 +1600,11 @@ mod tests { cx.set_state(input); let (position, snapshot) = cx.editor(|editor, _, cx| { - let positions = editor.selections.newest_anchor().head().text_anchor; + let positions = editor + .selections + .newest_anchor() + .head() + .expect_text_anchor(); let snapshot = editor .buffer() .clone() diff --git a/crates/editor/src/hover_popover.rs b/crates/editor/src/hover_popover.rs index 3bad6c97b6bcba4015331257a5b9a476dd0d3fd3..55350a9c679a10ea8597ae8c923c33af34d71360 100644 --- a/crates/editor/src/hover_popover.rs +++ b/crates/editor/src/hover_popover.rs @@ -275,12 +275,12 @@ fn show_hover( let snapshot = editor.snapshot(window, cx); - let (buffer, buffer_position) = editor + let (buffer_position, _) = editor .buffer .read(cx) - .text_anchor_for_position(anchor, cx)?; - - let (excerpt_id, _, _) = editor.buffer().read(cx).excerpt_containing(anchor, cx)?; + .snapshot(cx) + .anchor_to_buffer_anchor(anchor)?; + let buffer = editor.buffer.read(cx).buffer(buffer_position.buffer_id)?; let language_registry = editor .project() @@ -515,7 +515,7 @@ fn show_hover( .and_then(|range| { let range = snapshot .buffer_snapshot() - .anchor_range_in_excerpt(excerpt_id, range)?; + .buffer_anchor_range_to_anchor_range(range)?; Some(range) }) .or_else(|| { diff --git a/crates/editor/src/inlays.rs b/crates/editor/src/inlays.rs index 8c46e797cada703c9101fd91e670cbdd4ea713ac..689e273ce28310cb5051b0eae108b74de48d3ac1 100644 --- a/crates/editor/src/inlays.rs +++ b/crates/editor/src/inlays.rs @@ -45,6 +45,7 @@ impl InlaySplice { #[derive(Debug, Clone)] pub struct Inlay { pub id: InlayId, + // TODO this could be an ExcerptAnchor pub position: Anchor, pub content: InlayContent, } diff --git a/crates/editor/src/inlays/inlay_hints.rs b/crates/editor/src/inlays/inlay_hints.rs index 8422937ab81a392ad7d1187adcab765cc7f6875f..ac3133ea89c5da7cd861d608bcbd61975ee9535c 100644 --- a/crates/editor/src/inlays/inlay_hints.rs +++ b/crates/editor/src/inlays/inlay_hints.rs @@ -14,7 +14,7 
@@ use language::{ language_settings::{InlayHintKind, InlayHintSettings}, }; use lsp::LanguageServerId; -use multi_buffer::{Anchor, ExcerptId, MultiBufferSnapshot}; +use multi_buffer::{Anchor, MultiBufferSnapshot}; use project::{ HoverBlock, HoverBlockKind, InlayHintLabel, InlayHintLabelPartTooltip, InlayHintTooltip, InvalidationStrategy, ResolveState, @@ -110,14 +110,15 @@ impl LspInlayHintData { &mut self, buffer_ids: &HashSet, current_hints: impl IntoIterator, + snapshot: &MultiBufferSnapshot, ) { for buffer_id in buffer_ids { self.hint_refresh_tasks.remove(buffer_id); self.hint_chunk_fetching.remove(buffer_id); } for hint in current_hints { - if let Some(buffer_id) = hint.position.text_anchor.buffer_id { - if buffer_ids.contains(&buffer_id) { + if let Some((text_anchor, _)) = snapshot.anchor_to_buffer_anchor(hint.position) { + if buffer_ids.contains(&text_anchor.buffer_id) { self.added_hints.remove(&hint.id); } } @@ -237,7 +238,7 @@ pub enum InlayHintRefreshReason { server_id: LanguageServerId, request_id: Option, }, - ExcerptsRemoved(Vec), + BuffersRemoved(Vec), } impl Editor { @@ -303,7 +304,7 @@ impl Editor { let debounce = match &reason { InlayHintRefreshReason::SettingsChange(_) | InlayHintRefreshReason::Toggle(_) - | InlayHintRefreshReason::ExcerptsRemoved(_) + | InlayHintRefreshReason::BuffersRemoved(_) | InlayHintRefreshReason::ModifiersChanged(_) => None, _may_need_lsp_call => self.inlay_hints.as_ref().and_then(|inlay_hints| { if invalidate_cache.should_invalidate() { @@ -314,7 +315,8 @@ impl Editor { }), }; - let mut visible_excerpts = self.visible_excerpts(true, cx); + let mut visible_excerpts = self.visible_buffer_ranges(cx); + visible_excerpts.retain(|(snapshot, _, _)| self.is_lsp_relevant(snapshot.file(), cx)); let mut invalidate_hints_for_buffers = HashSet::default(); let ignore_previous_fetches = match reason { @@ -324,7 +326,7 @@ impl Editor { | InlayHintRefreshReason::ServerRemoved => true, InlayHintRefreshReason::NewLinesShown | 
InlayHintRefreshReason::RefreshRequested { .. } - | InlayHintRefreshReason::ExcerptsRemoved(_) => false, + | InlayHintRefreshReason::BuffersRemoved(_) => false, InlayHintRefreshReason::BufferEdited(buffer_id) => { let Some(affected_language) = self .buffer() @@ -351,8 +353,8 @@ impl Editor { ); semantics_provider.invalidate_inlay_hints(&invalidate_hints_for_buffers, cx); - visible_excerpts.retain(|_, (visible_buffer, _, _)| { - visible_buffer.read(cx).language() == Some(&affected_language) + visible_excerpts.retain(|(buffer_snapshot, _, _)| { + buffer_snapshot.language() == Some(&affected_language) }); false } @@ -371,6 +373,7 @@ impl Editor { inlay_hints.clear_for_buffers( &invalidate_hints_for_buffers, Self::visible_inlay_hints(self.display_map.read(cx)), + &multi_buffer.read(cx).snapshot(cx), ); } } @@ -379,14 +382,18 @@ impl Editor { .extend(invalidate_hints_for_buffers); let mut buffers_to_query = HashMap::default(); - for (_, (buffer, buffer_version, visible_range)) in visible_excerpts { - let buffer_id = buffer.read(cx).remote_id(); + for (buffer_snapshot, visible_range, _) in visible_excerpts { + let buffer_id = buffer_snapshot.remote_id(); if !self.registered_buffers.contains_key(&buffer_id) { continue; } - let buffer_snapshot = buffer.read(cx).snapshot(); + let Some(buffer) = multi_buffer.read(cx).buffer(buffer_id) else { + continue; + }; + + let buffer_version = buffer_snapshot.version().clone(); let buffer_anchor_range = buffer_snapshot.anchor_before(visible_range.start) ..buffer_snapshot.anchor_after(visible_range.end); @@ -514,13 +521,14 @@ impl Editor { } } } - InlayHintRefreshReason::ExcerptsRemoved(excerpts_removed) => { + InlayHintRefreshReason::BuffersRemoved(buffers_removed) => { let to_remove = self .display_map .read(cx) .current_inlays() .filter_map(|inlay| { - if excerpts_removed.contains(&inlay.position.excerpt_id) { + let anchor = inlay.position.raw_text_anchor()?; + if buffers_removed.contains(&anchor.buffer_id) { Some(inlay.id) } else { 
None @@ -610,13 +618,11 @@ impl Editor { }) .max_by_key(|hint| hint.id) { - if let Some(ResolvedHint::Resolved(cached_hint)) = hovered_hint - .position - .text_anchor - .buffer_id - .and_then(|buffer_id| { + if let Some(ResolvedHint::Resolved(cached_hint)) = buffer_snapshot + .anchor_to_buffer_anchor(hovered_hint.position) + .and_then(|(anchor, _)| { lsp_store.update(cx, |lsp_store, cx| { - lsp_store.resolved_hint(buffer_id, hovered_hint.id, cx) + lsp_store.resolved_hint(anchor.buffer_id, hovered_hint.id, cx) }) }) { @@ -787,15 +793,19 @@ impl Editor { new_hints: Vec<(Range, anyhow::Result)>, cx: &mut Context, ) { + let multi_buffer_snapshot = self.buffer.read(cx).snapshot(cx); let visible_inlay_hint_ids = Self::visible_inlay_hints(self.display_map.read(cx)) - .filter(|inlay| inlay.position.text_anchor.buffer_id == Some(buffer_id)) + .filter(|inlay| { + multi_buffer_snapshot + .anchor_to_buffer_anchor(inlay.position) + .map(|(anchor, _)| anchor.buffer_id) + == Some(buffer_id) + }) .map(|inlay| inlay.id) .collect::>(); let Some(inlay_hints) = &mut self.inlay_hints else { return; }; - - let multi_buffer_snapshot = self.buffer.read(cx).snapshot(cx); let Some(buffer_snapshot) = self .buffer .read(cx) @@ -910,12 +920,10 @@ impl Editor { hints_to_remove.extend( Self::visible_inlay_hints(self.display_map.read(cx)) .filter(|inlay| { - inlay - .position - .text_anchor - .buffer_id - .is_none_or(|buffer_id| { - invalidate_hints_for_buffers.contains(&buffer_id) + multi_buffer_snapshot + .anchor_to_buffer_anchor(inlay.position) + .is_none_or(|(anchor, _)| { + invalidate_hints_for_buffers.contains(&anchor.buffer_id) }) }) .map(|inlay| inlay.id), @@ -2285,17 +2293,15 @@ pub mod tests { cx: &mut gpui::TestAppContext, ) -> Range { let ranges = editor - .update(cx, |editor, _window, cx| editor.visible_excerpts(true, cx)) + .update(cx, |editor, _window, cx| editor.visible_buffer_ranges(cx)) .unwrap(); assert_eq!( ranges.len(), 1, "Single buffer should produce a single excerpt with 
visible range" ); - let (_, (excerpt_buffer, _, excerpt_visible_range)) = ranges.into_iter().next().unwrap(); - excerpt_buffer.read_with(cx, |buffer, _| { - excerpt_visible_range.to_point(&buffer.snapshot()) - }) + let (buffer_snapshot, visible_range, _) = ranges.into_iter().next().unwrap(); + visible_range.to_point(&buffer_snapshot) } #[gpui::test] @@ -2968,7 +2974,7 @@ let c = 3;"# .await .unwrap(); let multibuffer = cx.new(|_| MultiBuffer::new(Capability::ReadWrite)); - let (buffer_1_excerpts, buffer_2_excerpts) = multibuffer.update(cx, |multibuffer, cx| { + multibuffer.update(cx, |multibuffer, cx| { multibuffer.set_excerpts_for_path( PathKey::sorted(0), buffer_1.clone(), @@ -2983,15 +2989,8 @@ let c = 3;"# 0, cx, ); - let excerpt_ids = multibuffer.excerpt_ids(); - let buffer_1_excerpts = vec![excerpt_ids[0]]; - let buffer_2_excerpts = vec![excerpt_ids[1]]; - (buffer_1_excerpts, buffer_2_excerpts) }); - assert!(!buffer_1_excerpts.is_empty()); - assert!(!buffer_2_excerpts.is_empty()); - cx.executor().run_until_parked(); let editor = cx.add_window(|window, cx| { Editor::for_multibuffer(multibuffer, Some(project.clone()), window, cx) @@ -3092,7 +3091,7 @@ let c = 3;"# editor .update(cx, |editor, _, cx| { editor.buffer().update(cx, |multibuffer, cx| { - multibuffer.remove_excerpts_for_path(PathKey::sorted(1), cx); + multibuffer.remove_excerpts(PathKey::sorted(1), cx); }) }) .unwrap(); diff --git a/crates/editor/src/items.rs b/crates/editor/src/items.rs index d14078e79abdbfe40879da09221bad7bef47475a..d2c157014330cc26f0024ace87ee0e3688f85eaa 100644 --- a/crates/editor/src/items.rs +++ b/crates/editor/src/items.rs @@ -1,7 +1,7 @@ use crate::{ ActiveDebugLine, Anchor, Autoscroll, BufferSerialization, Capability, Editor, EditorEvent, - EditorSettings, ExcerptId, ExcerptRange, FormatTarget, MultiBuffer, MultiBufferSnapshot, - NavigationData, ReportEditorEvent, SelectionEffects, ToPoint as _, + EditorSettings, ExcerptRange, FormatTarget, MultiBuffer, MultiBufferSnapshot, 
NavigationData, + ReportEditorEvent, SelectionEffects, ToPoint as _, display_map::HighlightKey, editor_settings::SeedQuerySetting, persistence::{EditorDb, SerializedEditor}, @@ -22,7 +22,7 @@ use language::{ SelectionGoal, proto::serialize_anchor as serialize_text_anchor, }; use lsp::DiagnosticSeverity; -use multi_buffer::MultiBufferOffset; +use multi_buffer::{MultiBufferOffset, PathKey}; use project::{ File, Project, ProjectItem as _, ProjectPath, lsp_store::FormatTrigger, project_settings::ProjectSettings, search::SearchQuery, @@ -33,14 +33,13 @@ use std::{ any::{Any, TypeId}, borrow::Cow, cmp::{self, Ordering}, - iter, ops::Range, path::{Path, PathBuf}, sync::Arc, }; use text::{BufferId, BufferSnapshot, Selection}; use ui::{IconDecorationKind, prelude::*}; -use util::{ResultExt, TryFutureExt, paths::PathExt}; +use util::{ResultExt, TryFutureExt, paths::PathExt, rel_path::RelPath}; use workspace::item::{Dedup, ItemSettings, SerializableItem, TabContentParams}; use workspace::{ CollaboratorId, ItemId, ItemNavHistory, ToolbarItemLocation, ViewId, Workspace, WorkspaceId, @@ -83,10 +82,11 @@ impl FollowableItem for Editor { }; let buffer_ids = state - .excerpts + .path_excerpts .iter() .map(|excerpt| excerpt.buffer_id) .collect::>(); + let buffers = project.update(cx, |project, cx| { buffer_ids .iter() @@ -106,38 +106,32 @@ impl FollowableItem for Editor { multibuffer = MultiBuffer::singleton(buffers.pop().unwrap(), cx) } else { multibuffer = MultiBuffer::new(project.read(cx).capability()); - let mut sorted_excerpts = state.excerpts.clone(); - sorted_excerpts.sort_by_key(|e| e.id); - let sorted_excerpts = sorted_excerpts.into_iter().peekable(); - - for excerpt in sorted_excerpts { - let Ok(buffer_id) = BufferId::new(excerpt.buffer_id) else { + for path_with_ranges in state.path_excerpts { + let Some(path_key) = + path_with_ranges.path_key.and_then(deserialize_path_key) + else { continue; }; - - let mut insert_position = ExcerptId::min(); - for e in &state.excerpts { 
- if e.id == excerpt.id { - break; - } - if e.id < excerpt.id { - insert_position = ExcerptId::from_proto(e.id); - } - } - - let buffer = - buffers.iter().find(|b| b.read(cx).remote_id() == buffer_id); - - let Some(excerpt) = deserialize_excerpt_range(excerpt) else { + let Some(buffer_id) = BufferId::new(path_with_ranges.buffer_id).ok() + else { continue; }; - - let Some(buffer) = buffer else { continue }; - - multibuffer.insert_excerpts_with_ids_after( - insert_position, + let Some(buffer) = + buffers.iter().find(|b| b.read(cx).remote_id() == buffer_id) + else { + continue; + }; + let buffer_snapshot = buffer.read(cx).snapshot(); + let ranges = path_with_ranges + .ranges + .into_iter() + .filter_map(deserialize_excerpt_range) + .collect::>(); + multibuffer.update_path_excerpts( + path_key, buffer.clone(), - [excerpt], + &buffer_snapshot, + &ranges, cx, ); } @@ -158,6 +152,7 @@ impl FollowableItem for Editor { }) })?; + editor.update(cx, |editor, cx| editor.text(cx)); update_editor_from_message( editor.downgrade(), project, @@ -215,38 +210,43 @@ impl FollowableItem for Editor { let display_snapshot = self.display_map.update(cx, |map, cx| map.snapshot(cx)); let scroll_anchor = self.scroll_manager.native_anchor(&display_snapshot, cx); let buffer = self.buffer.read(cx); - let excerpts = buffer - .read(cx) - .excerpts() - .map(|(id, buffer, range)| proto::Excerpt { - id: id.to_proto(), - buffer_id: buffer.remote_id().into(), - context_start: Some(serialize_text_anchor(&range.context.start)), - context_end: Some(serialize_text_anchor(&range.context.end)), - primary_start: Some(serialize_text_anchor(&range.primary.start)), - primary_end: Some(serialize_text_anchor(&range.primary.end)), - }) - .collect(); let snapshot = buffer.snapshot(cx); + let mut path_excerpts: Vec = Vec::new(); + for excerpt in snapshot.excerpts() { + if let Some(prev_entry) = path_excerpts.last_mut() + && prev_entry.buffer_id == excerpt.context.start.buffer_id.to_proto() + { + 
prev_entry.ranges.push(serialize_excerpt_range(excerpt)); + } else if let Some(path_key) = snapshot.path_for_buffer(excerpt.context.start.buffer_id) + { + path_excerpts.push(proto::PathExcerpts { + path_key: Some(serialize_path_key(path_key)), + buffer_id: excerpt.context.start.buffer_id.to_proto(), + ranges: vec![serialize_excerpt_range(excerpt)], + }); + } + } Some(proto::view::Variant::Editor(proto::view::Editor { singleton: buffer.is_singleton(), title: buffer.explicit_title().map(ToOwned::to_owned), - excerpts, - scroll_top_anchor: Some(serialize_anchor(&scroll_anchor.anchor, &snapshot)), + excerpts: Vec::new(), + scroll_top_anchor: Some(serialize_anchor(&scroll_anchor.anchor)), scroll_x: scroll_anchor.offset.x, scroll_y: scroll_anchor.offset.y, selections: self .selections .disjoint_anchors_arc() .iter() - .map(|s| serialize_selection(s, &snapshot)) + .map(serialize_selection) .collect(), pending_selection: self .selections .pending_anchor() .as_ref() - .map(|s| serialize_selection(s, &snapshot)), + .copied() + .map(serialize_selection), + path_excerpts, })) } @@ -277,56 +277,52 @@ impl FollowableItem for Editor { match update { proto::update_view::Variant::Editor(update) => match event { - EditorEvent::ExcerptsAdded { + EditorEvent::BufferRangesUpdated { buffer, - predecessor, - excerpts, + path_key, + ranges, } => { - let buffer_id = buffer.read(cx).remote_id(); - let mut excerpts = excerpts.iter(); - if let Some((id, range)) = excerpts.next() { - update.inserted_excerpts.push(proto::ExcerptInsertion { - previous_excerpt_id: Some(predecessor.to_proto()), - excerpt: serialize_excerpt(buffer_id, id, range), - }); - update.inserted_excerpts.extend(excerpts.map(|(id, range)| { - proto::ExcerptInsertion { - previous_excerpt_id: None, - excerpt: serialize_excerpt(buffer_id, id, range), - } - })) - } + let buffer_id = buffer.read(cx).remote_id().to_proto(); + let path_key = serialize_path_key(path_key); + let ranges = ranges + .iter() + .cloned() + 
.map(serialize_excerpt_range) + .collect::>(); + update.updated_paths.push(proto::PathExcerpts { + path_key: Some(path_key), + buffer_id, + ranges, + }); true } - EditorEvent::ExcerptsRemoved { ids, .. } => { + EditorEvent::BuffersRemoved { removed_buffer_ids } => { update - .deleted_excerpts - .extend(ids.iter().copied().map(ExcerptId::to_proto)); + .deleted_buffers + .extend(removed_buffer_ids.iter().copied().map(BufferId::to_proto)); true } EditorEvent::ScrollPositionChanged { autoscroll, .. } if !autoscroll => { let display_snapshot = self.display_map.update(cx, |map, cx| map.snapshot(cx)); - let snapshot = self.buffer.read(cx).snapshot(cx); let scroll_anchor = self.scroll_manager.native_anchor(&display_snapshot, cx); - update.scroll_top_anchor = - Some(serialize_anchor(&scroll_anchor.anchor, &snapshot)); + update.scroll_top_anchor = Some(serialize_anchor(&scroll_anchor.anchor)); update.scroll_x = scroll_anchor.offset.x; update.scroll_y = scroll_anchor.offset.y; true } EditorEvent::SelectionsChanged { .. } => { - let snapshot = self.buffer.read(cx).snapshot(cx); update.selections = self .selections .disjoint_anchors_arc() .iter() - .map(|s| serialize_selection(s, &snapshot)) + .map(serialize_selection) .collect(); update.pending_selection = self .selections .pending_anchor() .as_ref() - .map(|s| serialize_selection(s, &snapshot)); + .copied() + .map(serialize_selection); true } _ => false, @@ -370,7 +366,7 @@ impl FollowableItem for Editor { ) { let buffer = self.buffer.read(cx); let buffer = buffer.read(cx); - let Some(position) = buffer.as_singleton_anchor(location) else { + let Some(position) = buffer.anchor_in_excerpt(location) else { return; }; let selection = Selection { @@ -394,9 +390,9 @@ async fn update_editor_from_message( ) -> Result<()> { // Open all of the buffers of which excerpts were added to the editor. 
let inserted_excerpt_buffer_ids = message - .inserted_excerpts + .updated_paths .iter() - .filter_map(|insertion| Some(insertion.excerpt.as_ref()?.buffer_id)) + .map(|insertion| insertion.buffer_id) .collect::>(); let inserted_excerpt_buffers = project.update(cx, |project, cx| { inserted_excerpt_buffer_ids @@ -407,66 +403,53 @@ async fn update_editor_from_message( let _inserted_excerpt_buffers = try_join_all(inserted_excerpt_buffers).await?; // Update the editor's excerpts. - this.update(cx, |editor, cx| { + let buffer_snapshot = this.update(cx, |editor, cx| { editor.buffer.update(cx, |multibuffer, cx| { - let mut removed_excerpt_ids = message - .deleted_excerpts - .into_iter() - .map(ExcerptId::from_proto) - .collect::>(); - removed_excerpt_ids.sort_by({ - let multibuffer = multibuffer.read(cx); - move |a, b| a.cmp(b, &multibuffer) - }); - - let mut insertions = message.inserted_excerpts.into_iter().peekable(); - while let Some(insertion) = insertions.next() { - let Some(excerpt) = insertion.excerpt else { + for path_with_excerpts in message.updated_paths { + let Some(path_key) = path_with_excerpts.path_key.and_then(deserialize_path_key) + else { continue; }; - let Some(previous_excerpt_id) = insertion.previous_excerpt_id else { - continue; - }; - let buffer_id = BufferId::new(excerpt.buffer_id)?; - let Some(buffer) = project.read(cx).buffer_for_id(buffer_id, cx) else { + let ranges = path_with_excerpts + .ranges + .into_iter() + .filter_map(deserialize_excerpt_range) + .collect::>(); + let Some(buffer) = BufferId::new(path_with_excerpts.buffer_id) + .ok() + .and_then(|buffer_id| project.read(cx).buffer_for_id(buffer_id, cx)) + else { continue; }; - let adjacent_excerpts = iter::from_fn(|| { - let insertion = insertions.peek()?; - if insertion.previous_excerpt_id.is_none() - && insertion.excerpt.as_ref()?.buffer_id == u64::from(buffer_id) - { - insertions.next()?.excerpt - } else { - None - } - }); + let buffer_snapshot = buffer.read(cx).snapshot(); + 
multibuffer.update_path_excerpts(path_key, buffer, &buffer_snapshot, &ranges, cx); + } - multibuffer.insert_excerpts_with_ids_after( - ExcerptId::from_proto(previous_excerpt_id), - buffer, - [excerpt] - .into_iter() - .chain(adjacent_excerpts) - .filter_map(deserialize_excerpt_range), - cx, - ); + for buffer_id in message + .deleted_buffers + .into_iter() + .filter_map(|buffer_id| BufferId::new(buffer_id).ok()) + { + multibuffer.remove_excerpts_for_buffer(buffer_id, cx); } - multibuffer.remove_excerpts(removed_excerpt_ids, cx); - anyhow::Ok(()) + multibuffer.snapshot(cx) }) - })??; + })?; // Deserialize the editor state. let selections = message .selections .into_iter() - .filter_map(deserialize_selection) + .filter_map(|selection| deserialize_selection(selection, &buffer_snapshot)) .collect::>(); - let pending_selection = message.pending_selection.and_then(deserialize_selection); - let scroll_top_anchor = message.scroll_top_anchor.and_then(deserialize_anchor); + let pending_selection = message + .pending_selection + .and_then(|selection| deserialize_selection(selection, &buffer_snapshot)); + let scroll_top_anchor = message + .scroll_top_anchor + .and_then(|selection| deserialize_anchor(selection, &buffer_snapshot)); // Wait until the buffer has received all of the operations referenced by // the editor's new state. 
@@ -503,79 +486,103 @@ async fn update_editor_from_message( Ok(()) } -fn serialize_excerpt( - buffer_id: BufferId, - id: &ExcerptId, - range: &ExcerptRange, -) -> Option { - Some(proto::Excerpt { - id: id.to_proto(), - buffer_id: buffer_id.into(), - context_start: Some(serialize_text_anchor(&range.context.start)), - context_end: Some(serialize_text_anchor(&range.context.end)), - primary_start: Some(serialize_text_anchor(&range.primary.start)), - primary_end: Some(serialize_text_anchor(&range.primary.end)), - }) -} - -fn serialize_selection( - selection: &Selection, - buffer: &MultiBufferSnapshot, -) -> proto::Selection { +fn serialize_selection(selection: &Selection) -> proto::Selection { proto::Selection { id: selection.id as u64, - start: Some(serialize_anchor(&selection.start, buffer)), - end: Some(serialize_anchor(&selection.end, buffer)), + start: Some(serialize_anchor(&selection.start)), + end: Some(serialize_anchor(&selection.end)), reversed: selection.reversed, } } -fn serialize_anchor(anchor: &Anchor, buffer: &MultiBufferSnapshot) -> proto::EditorAnchor { - proto::EditorAnchor { - excerpt_id: buffer.latest_excerpt_id(anchor.excerpt_id).to_proto(), - anchor: Some(serialize_text_anchor(&anchor.text_anchor)), +fn serialize_anchor(anchor: &Anchor) -> proto::EditorAnchor { + match anchor { + Anchor::Min => proto::EditorAnchor { + excerpt_id: None, + anchor: Some(proto::Anchor { + replica_id: 0, + timestamp: 0, + offset: 0, + bias: proto::Bias::Left as i32, + buffer_id: None, + }), + }, + Anchor::Excerpt(_) => proto::EditorAnchor { + excerpt_id: None, + anchor: anchor.raw_text_anchor().map(|a| serialize_text_anchor(&a)), + }, + Anchor::Max => proto::EditorAnchor { + excerpt_id: None, + anchor: Some(proto::Anchor { + replica_id: u32::MAX, + timestamp: u32::MAX, + offset: u64::MAX, + bias: proto::Bias::Right as i32, + buffer_id: None, + }), + }, + } +} + +fn serialize_excerpt_range(range: ExcerptRange) -> proto::ExcerptRange { + let context_start = 
language::proto::serialize_anchor(&range.context.start); + let context_end = language::proto::serialize_anchor(&range.context.end); + let primary_start = language::proto::serialize_anchor(&range.primary.start); + let primary_end = language::proto::serialize_anchor(&range.primary.end); + proto::ExcerptRange { + context_start: Some(context_start), + context_end: Some(context_end), + primary_start: Some(primary_start), + primary_end: Some(primary_end), } } fn deserialize_excerpt_range( - excerpt: proto::Excerpt, -) -> Option<(ExcerptId, ExcerptRange)> { + excerpt_range: proto::ExcerptRange, +) -> Option> { let context = { - let start = language::proto::deserialize_anchor(excerpt.context_start?)?; - let end = language::proto::deserialize_anchor(excerpt.context_end?)?; + let start = language::proto::deserialize_anchor(excerpt_range.context_start?)?; + let end = language::proto::deserialize_anchor(excerpt_range.context_end?)?; start..end }; - let primary = excerpt + let primary = excerpt_range .primary_start - .zip(excerpt.primary_end) + .zip(excerpt_range.primary_end) .and_then(|(start, end)| { let start = language::proto::deserialize_anchor(start)?; let end = language::proto::deserialize_anchor(end)?; Some(start..end) }) .unwrap_or_else(|| context.clone()); - Some(( - ExcerptId::from_proto(excerpt.id), - ExcerptRange { context, primary }, - )) + Some(ExcerptRange { context, primary }) } -fn deserialize_selection(selection: proto::Selection) -> Option> { +fn deserialize_selection( + selection: proto::Selection, + buffer: &MultiBufferSnapshot, +) -> Option> { Some(Selection { id: selection.id as usize, - start: deserialize_anchor(selection.start?)?, - end: deserialize_anchor(selection.end?)?, + start: deserialize_anchor(selection.start?, buffer)?, + end: deserialize_anchor(selection.end?, buffer)?, reversed: selection.reversed, goal: SelectionGoal::None, }) } -fn deserialize_anchor(anchor: proto::EditorAnchor) -> Option { - let excerpt_id = 
ExcerptId::from_proto(anchor.excerpt_id); - Some(Anchor::in_buffer( - excerpt_id, - language::proto::deserialize_anchor(anchor.anchor?)?, - )) +fn deserialize_anchor(anchor: proto::EditorAnchor, buffer: &MultiBufferSnapshot) -> Option { + let anchor = anchor.anchor?; + if let Some(buffer_id) = anchor.buffer_id + && BufferId::new(buffer_id).is_ok() + { + let text_anchor = language::proto::deserialize_anchor(anchor)?; + buffer.anchor_in_buffer(text_anchor) + } else { + match proto::Bias::from_i32(anchor.bias)? { + proto::Bias::Left => Some(Anchor::Min), + proto::Bias::Right => Some(Anchor::Max), + } + } } impl Item for Editor { @@ -1071,7 +1078,7 @@ impl Item for Editor { f(ItemEvent::UpdateBreadcrumbs); } - EditorEvent::ExcerptsAdded { .. } | EditorEvent::ExcerptsRemoved { .. } => { + EditorEvent::BufferRangesUpdated { .. } | EditorEvent::BuffersRemoved { .. } => { f(ItemEvent::Edit); } @@ -1434,9 +1441,9 @@ impl ProjectItem for Editor { cx: &mut Context, ) -> Self { let mut editor = Self::for_buffer(buffer.clone(), Some(project), window, cx); + let multibuffer_snapshot = editor.buffer().read(cx).snapshot(cx); - if let Some((excerpt_id, _, snapshot)) = - editor.buffer().read(cx).snapshot(cx).as_singleton() + if let Some(buffer_snapshot) = editor.buffer().read(cx).snapshot(cx).as_singleton() && WorkspaceSettings::get(None, cx).restore_on_file_reopen && let Some(restoration_data) = Self::project_item_kind() .and_then(|kind| pane.as_ref()?.project_item_restoration_data.get(&kind)) @@ -1448,7 +1455,7 @@ impl ProjectItem for Editor { { if !restoration_data.folds.is_empty() { editor.fold_ranges( - clip_ranges(&restoration_data.folds, snapshot), + clip_ranges(&restoration_data.folds, buffer_snapshot), false, window, cx, @@ -1456,12 +1463,11 @@ impl ProjectItem for Editor { } if !restoration_data.selections.is_empty() { editor.change_selections(SelectionEffects::no_scroll(), window, cx, |s| { - s.select_ranges(clip_ranges(&restoration_data.selections, snapshot)); + 
s.select_ranges(clip_ranges(&restoration_data.selections, buffer_snapshot)); }); } let (top_row, offset) = restoration_data.scroll_position; - let anchor = - Anchor::in_buffer(excerpt_id, snapshot.anchor_before(Point::new(top_row, 0))); + let anchor = multibuffer_snapshot.anchor_before(Point::new(top_row, 0)); editor.set_scroll_anchor(ScrollAnchor { anchor, offset }, window, cx); } @@ -1624,6 +1630,7 @@ impl SearchableItem for Editor { regex: true, replacement: false, selection: false, + select_all: true, find_in_results: true, } } else { @@ -1633,6 +1640,7 @@ impl SearchableItem for Editor { regex: true, replacement: true, selection: true, + select_all: true, find_in_results: false, } } @@ -1838,7 +1846,7 @@ impl SearchableItem for Editor { }; for range in search_within_ranges { - for (search_buffer, search_range, excerpt_id, deleted_hunk_anchor) in + for (search_buffer, search_range, deleted_hunk_anchor) in buffer.range_to_buffer_ranges_with_deleted_hunks(range) { ranges.extend( @@ -1849,20 +1857,22 @@ impl SearchableItem for Editor { ) .await .into_iter() - .map(|match_range| { + .filter_map(|match_range| { if let Some(deleted_hunk_anchor) = deleted_hunk_anchor { let start = search_buffer .anchor_after(search_range.start + match_range.start); let end = search_buffer .anchor_before(search_range.start + match_range.end); - deleted_hunk_anchor.with_diff_base_anchor(start) - ..deleted_hunk_anchor.with_diff_base_anchor(end) + Some( + deleted_hunk_anchor.with_diff_base_anchor(start) + ..deleted_hunk_anchor.with_diff_base_anchor(end), + ) } else { let start = search_buffer .anchor_after(search_range.start + match_range.start); let end = search_buffer .anchor_before(search_range.start + match_range.end); - Anchor::range_in_buffer(excerpt_id, start..end) + buffer.buffer_anchor_range_to_anchor_range(start..end) } }), ); @@ -2050,6 +2060,20 @@ fn restore_serialized_buffer_contents( } } +fn serialize_path_key(path_key: &PathKey) -> proto::PathKey { + proto::PathKey { + 
sort_prefix: path_key.sort_prefix, + path: path_key.path.to_proto(), + } +} + +fn deserialize_path_key(path_key: proto::PathKey) -> Option { + Some(PathKey { + sort_prefix: path_key.sort_prefix, + path: RelPath::from_proto(&path_key.path).ok()?, + }) +} + #[cfg(test)] mod tests { use crate::editor_tests::init_test; diff --git a/crates/editor/src/jsx_tag_auto_close.rs b/crates/editor/src/jsx_tag_auto_close.rs index b91f039aff7cfb8bc7997cfbf63abb8dbe4662e5..d57941f6d082a929f6547c38ddbc21908304d76c 100644 --- a/crates/editor/src/jsx_tag_auto_close.rs +++ b/crates/editor/src/jsx_tag_auto_close.rs @@ -352,11 +352,12 @@ pub(crate) fn construct_initial_buffer_versions_map< } for (edit_range, _) in edits { - let edit_range_buffer = editor - .buffer() - .read(cx) - .excerpt_containing(edit_range.end, cx) - .map(|e| e.1); + let multibuffer = editor.buffer.read(cx); + let snapshot = multibuffer.snapshot(cx); + let anchor = snapshot.anchor_before(edit_range.end); + let edit_range_buffer = snapshot + .anchor_to_buffer_anchor(anchor) + .and_then(|(text_anchor, _)| multibuffer.buffer(text_anchor.buffer_id)); if let Some(buffer) = edit_range_buffer { let (buffer_id, buffer_version) = buffer.read_with(cx, |buffer, _| (buffer.remote_id(), buffer.version.clone())); diff --git a/crates/editor/src/linked_editing_ranges.rs b/crates/editor/src/linked_editing_ranges.rs index ccd0e64bd850f6ce84e225fe77f1c0a0d5385dc1..148bb27addecfb4982625a2d6129e7d3827d7883 100644 --- a/crates/editor/src/linked_editing_ranges.rs +++ b/crates/editor/src/linked_editing_ranges.rs @@ -2,7 +2,6 @@ use collections::HashMap; use gpui::{AppContext, Context, Entity, Window}; use itertools::Itertools; use language::Buffer; -use multi_buffer::MultiBufferOffset; use std::{ops::Range, sync::Arc, time::Duration}; use text::{Anchor, AnchorRangeExt, Bias, BufferId, ToOffset, ToPoint}; use util::ResultExt; @@ -62,27 +61,15 @@ pub(super) fn refresh_linked_ranges( editor .update(cx, |editor, cx| { let display_snapshot = 
editor.display_snapshot(cx); - let selections = editor - .selections - .all::(&display_snapshot); + let selections = editor.selections.all_anchors(&display_snapshot); let snapshot = display_snapshot.buffer_snapshot(); let buffer = editor.buffer.read(cx); - for selection in selections { - let cursor_position = selection.head(); - let start_position = snapshot.anchor_before(cursor_position); - let end_position = snapshot.anchor_after(selection.tail()); - if start_position.text_anchor.buffer_id != end_position.text_anchor.buffer_id - || end_position.text_anchor.buffer_id.is_none() + for selection in selections.iter() { + if let Some((_, range)) = + snapshot.anchor_range_to_buffer_anchor_range(selection.range()) + && let Some(buffer) = buffer.buffer(range.start.buffer_id) { - // Throw away selections spanning multiple buffers. - continue; - } - if let Some(buffer) = buffer.buffer_for_anchor(end_position, cx) { - applicable_selections.push(( - buffer, - start_position.text_anchor, - end_position.text_anchor, - )); + applicable_selections.push((buffer, range.start, range.end)); } } }) diff --git a/crates/editor/src/lsp_ext.rs b/crates/editor/src/lsp_ext.rs index ef0f92de79b0fe7a7e4a495dc29c1305b2f5eefa..6f9f94bc72227f7f30bdca1c9ae1ce436f3d5aa4 100644 --- a/crates/editor/src/lsp_ext.rs +++ b/crates/editor/src/lsp_ext.rs @@ -9,7 +9,6 @@ use language::Buffer; use language::Language; use lsp::LanguageServerId; use lsp::LanguageServerName; -use multi_buffer::Anchor; use project::LanguageServerToQuery; use project::LocationLink; use project::Project; @@ -27,7 +26,12 @@ pub(crate) fn find_specific_language_server_in_selection( cx: &mut App, filter_language: F, language_server_name: LanguageServerName, -) -> Option<(Anchor, Arc, LanguageServerId, Entity)> +) -> Option<( + text::Anchor, + Arc, + LanguageServerId, + Entity, +)> where F: Fn(&Language) -> bool, { @@ -40,19 +44,15 @@ where .iter() .find_map(|selection| { let multi_buffer = multi_buffer.read(cx); - let (position, 
buffer) = multi_buffer - .buffer_for_anchor(selection.head(), cx) - .map(|buffer| (selection.head(), buffer)) - .or_else(|| { - multi_buffer - .buffer_for_anchor(selection.tail(), cx) - .map(|buffer| (selection.tail(), buffer)) - })?; + let multi_buffer_snapshot = multi_buffer.snapshot(cx); + let (position, buffer) = multi_buffer_snapshot + .anchor_to_buffer_anchor(selection.head()) + .and_then(|(anchor, _)| Some((anchor, multi_buffer.buffer(anchor.buffer_id)?)))?; if !seen_buffer_ids.insert(buffer.read(cx).remote_id()) { return None; } - let language = buffer.read(cx).language_at(position.text_anchor)?; + let language = buffer.read(cx).language_at(position)?; if filter_language(&language) { let server_id = buffer.update(cx, |buffer, cx| { project @@ -108,7 +108,7 @@ pub fn lsp_tasks( let buffers = buffer_ids .iter() .filter(|&&buffer_id| match for_position { - Some(for_position) => for_position.buffer_id == Some(buffer_id), + Some(for_position) => for_position.buffer_id == buffer_id, None => true, }) .filter_map(|&buffer_id| project.read(cx).buffer_for_id(buffer_id, cx)) @@ -194,7 +194,7 @@ mod tests { use language::{FakeLspAdapter, Language}; use languages::rust_lang; use lsp::{LanguageServerId, LanguageServerName}; - use multi_buffer::{Anchor, MultiBuffer}; + use multi_buffer::MultiBuffer; use project::{FakeFs, Project}; use util::path; @@ -236,7 +236,7 @@ mod tests { let filter = |language: &Language| language.name().as_ref() == "Rust"; let assert_result = |result: Option<( - Anchor, + text::Anchor, Arc, LanguageServerId, Entity, diff --git a/crates/editor/src/mouse_context_menu.rs b/crates/editor/src/mouse_context_menu.rs index 2ddbb48b5fc434f65521c6dd230537aedb71dabb..0028f52d3d91ca9e6ea660dec0628e7ca6b9e520 100644 --- a/crates/editor/src/mouse_context_menu.rs +++ b/crates/editor/src/mouse_context_menu.rs @@ -205,16 +205,17 @@ pub fn deploy_context_menu( .all::(&display_map) .into_iter() .any(|s| !s.is_empty()); - let has_git_repo = buffer - 
.buffer_id_for_anchor(anchor) - .is_some_and(|buffer_id| { - project - .read(cx) - .git_store() - .read(cx) - .repository_and_path_for_buffer_id(buffer_id, cx) - .is_some() - }); + let has_git_repo = + buffer + .anchor_to_buffer_anchor(anchor) + .is_some_and(|(buffer_anchor, _)| { + project + .read(cx) + .git_store() + .read(cx) + .repository_and_path_for_buffer_id(buffer_anchor.buffer_id, cx) + .is_some() + }); let evaluate_selection = window.is_action_available(&EvaluateSelectedText, cx); let run_to_cursor = window.is_action_available(&RunToCursor, cx); diff --git a/crates/editor/src/movement.rs b/crates/editor/src/movement.rs index 955f511577d2cbfede1a4cb4eb6d99e429c879d6..67869f770b81f315680388165111bbc1a2e0f111 100644 --- a/crates/editor/src/movement.rs +++ b/crates/editor/src/movement.rs @@ -588,22 +588,30 @@ pub fn start_of_excerpt( direction: Direction, ) -> DisplayPoint { let point = map.display_point_to_point(display_point, Bias::Left); - let Some(excerpt) = map.buffer_snapshot().excerpt_containing(point..point) else { + let Some((_, excerpt_range)) = map.buffer_snapshot().excerpt_containing(point..point) else { return display_point; }; match direction { Direction::Prev => { - let mut start = excerpt.start_anchor().to_display_point(map); + let Some(start_anchor) = map.anchor_in_excerpt(excerpt_range.context.start) else { + return display_point; + }; + let mut start = start_anchor.to_display_point(map); if start >= display_point && start.row() > DisplayRow(0) { - let Some(excerpt) = map.buffer_snapshot().excerpt_before(excerpt.id()) else { + let Some(excerpt) = map.buffer_snapshot().excerpt_before(start_anchor) else { return display_point; }; - start = excerpt.start_anchor().to_display_point(map); + if let Some(start_anchor) = map.anchor_in_excerpt(excerpt.context.start) { + start = start_anchor.to_display_point(map); + } } start } Direction::Next => { - let mut end = excerpt.end_anchor().to_display_point(map); + let Some(end_anchor) = 
map.anchor_in_excerpt(excerpt_range.context.end) else { + return display_point; + }; + let mut end = end_anchor.to_display_point(map); *end.row_mut() += 1; map.clip_point(end, Bias::Right) } @@ -616,12 +624,15 @@ pub fn end_of_excerpt( direction: Direction, ) -> DisplayPoint { let point = map.display_point_to_point(display_point, Bias::Left); - let Some(excerpt) = map.buffer_snapshot().excerpt_containing(point..point) else { + let Some((_, excerpt_range)) = map.buffer_snapshot().excerpt_containing(point..point) else { return display_point; }; match direction { Direction::Prev => { - let mut start = excerpt.start_anchor().to_display_point(map); + let Some(start_anchor) = map.anchor_in_excerpt(excerpt_range.context.start) else { + return display_point; + }; + let mut start = start_anchor.to_display_point(map); if start.row() > DisplayRow(0) { *start.row_mut() -= 1; } @@ -630,18 +641,23 @@ pub fn end_of_excerpt( start } Direction::Next => { - let mut end = excerpt.end_anchor().to_display_point(map); + let Some(end_anchor) = map.anchor_in_excerpt(excerpt_range.context.end) else { + return display_point; + }; + let mut end = end_anchor.to_display_point(map); *end.column_mut() = 0; if end <= display_point { *end.row_mut() += 1; let point_end = map.display_point_to_point(end, Bias::Right); - let Some(excerpt) = map + let Some((_, excerpt_range)) = map .buffer_snapshot() .excerpt_containing(point_end..point_end) else { return display_point; }; - end = excerpt.end_anchor().to_display_point(map); + if let Some(end_anchor) = map.anchor_in_excerpt(excerpt_range.context.end) { + end = end_anchor.to_display_point(map); + } *end.column_mut() = 0; } end diff --git a/crates/editor/src/runnables.rs b/crates/editor/src/runnables.rs index 92663ff9a96d1f84e2de387917e2d6a32b16aa00..f451eb7d61d6a2513e1ebf6ec96062b600cbecb6 100644 --- a/crates/editor/src/runnables.rs +++ b/crates/editor/src/runnables.rs @@ -8,9 +8,7 @@ use gpui::{ }; use language::{Buffer, BufferRow, Runnable}; use 
lsp::LanguageServerName; -use multi_buffer::{ - Anchor, BufferOffset, MultiBufferOffset, MultiBufferRow, MultiBufferSnapshot, ToPoint as _, -}; +use multi_buffer::{Anchor, BufferOffset, MultiBufferRow, MultiBufferSnapshot, ToPoint as _}; use project::{ Location, Project, TaskSourceKind, debugger::breakpoint_store::{Breakpoint, BreakpointSessionState}, @@ -165,7 +163,7 @@ impl Editor { .update(cx, |editor, cx| { let multi_buffer = editor.buffer().read(cx); if multi_buffer.is_singleton() { - Some((multi_buffer.snapshot(cx), Anchor::min()..Anchor::max())) + Some((multi_buffer.snapshot(cx), Anchor::Min..Anchor::Max)) } else { let display_snapshot = editor.display_map.update(cx, |map, cx| map.snapshot(cx)); @@ -209,16 +207,8 @@ impl Editor { .fold(HashMap::default(), |mut acc, (kind, location, task)| { let buffer = location.target.buffer; let buffer_snapshot = buffer.read(cx).snapshot(); - let offset = multi_buffer_snapshot.excerpts().find_map( - |(excerpt_id, snapshot, _)| { - if snapshot.remote_id() == buffer_snapshot.remote_id() { - multi_buffer_snapshot - .anchor_in_excerpt(excerpt_id, location.target.range.start) - } else { - None - } - }, - ); + let offset = + multi_buffer_snapshot.anchor_in_excerpt(location.target.range.start); if let Some(offset) = offset { let task_buffer_range = location.target.range.to_point(&buffer_snapshot); @@ -369,20 +359,23 @@ impl Editor { (selection, buffer, snapshot) }; let selection_range = selection.range(); - let start = editor_snapshot + let Some((_, range)) = editor_snapshot .display_snapshot .buffer_snapshot() - .anchor_after(selection_range.start) - .text_anchor; - let end = editor_snapshot - .display_snapshot - .buffer_snapshot() - .anchor_after(selection_range.end) - .text_anchor; - let location = Location { - buffer, - range: start..end, + .anchor_range_to_buffer_anchor_range( + editor_snapshot + .display_snapshot + .buffer_snapshot() + .anchor_after(selection_range.start) + ..editor_snapshot + .display_snapshot + 
.buffer_snapshot() + .anchor_before(selection_range.end), + ) + else { + return Task::ready(None); }; + let location = Location { buffer, range }; let captured_variables = { let mut variables = TaskVariables::default(); let buffer = location.buffer.read(cx); @@ -430,9 +423,9 @@ impl Editor { return HashMap::default(); } let buffers = if visible_only { - self.visible_excerpts(true, cx) - .into_values() - .map(|(buffer, _, _)| buffer) + self.visible_buffers(cx) + .into_iter() + .filter(|buffer| self.is_lsp_relevant(buffer.read(cx).file(), cx)) .collect() } else { self.buffer().read(cx).all_buffers() @@ -482,19 +475,15 @@ impl Editor { cx: &mut Context, ) -> Option<(Entity, u32, Arc)> { let snapshot = self.buffer.read(cx).snapshot(cx); - let offset = self - .selections - .newest::(&self.display_snapshot(cx)) - .head(); - let mut excerpt = snapshot.excerpt_containing(offset..offset)?; - let offset = excerpt.map_offset_to_buffer(offset); - let buffer_id = excerpt.buffer().remote_id(); + let anchor = self.selections.newest_anchor().head(); + let (anchor, buffer_snapshot) = snapshot.anchor_to_buffer_anchor(anchor)?; + let offset = anchor.to_offset(buffer_snapshot); - let layer = excerpt.buffer().syntax_layer_at(offset)?; + let layer = buffer_snapshot.syntax_layer_at(offset)?; let mut cursor = layer.node().walk(); - while cursor.goto_first_child_for_byte(offset.0).is_some() { - if cursor.node().end_byte() == offset.0 { + while cursor.goto_first_child_for_byte(offset).is_some() { + if cursor.node().end_byte() == offset { cursor.goto_next_sibling(); } } @@ -503,18 +492,18 @@ impl Editor { loop { let node = cursor.node(); let node_range = node.byte_range(); - let symbol_start_row = excerpt.buffer().offset_to_point(node.start_byte()).row; + let symbol_start_row = buffer_snapshot.offset_to_point(node.start_byte()).row; // Check if this node contains our offset - if node_range.start <= offset.0 && node_range.end >= offset.0 { + if node_range.start <= offset && node_range.end >= 
offset { // If it contains offset, check for task if let Some(tasks) = self .runnables .runnables - .get(&buffer_id) + .get(&buffer_snapshot.remote_id()) .and_then(|(_, tasks)| tasks.get(&symbol_start_row)) { - let buffer = self.buffer.read(cx).buffer(buffer_id)?; + let buffer = self.buffer.read(cx).buffer(buffer_snapshot.remote_id())?; return Some((buffer, symbol_start_row, Arc::new(tasks.to_owned()))); } } diff --git a/crates/editor/src/rust_analyzer_ext.rs b/crates/editor/src/rust_analyzer_ext.rs index 6ffdf1a248a0e605f623254bbfa36776adf77cda..6d4d599961761789dbf14c77cd3843b036d05b5e 100644 --- a/crates/editor/src/rust_analyzer_ext.rs +++ b/crates/editor/src/rust_analyzer_ext.rs @@ -88,7 +88,7 @@ pub fn go_to_parent_module( let request = proto::LspExtGoToParentModule { project_id, buffer_id: buffer_id.to_proto(), - position: Some(serialize_anchor(&trigger_anchor.text_anchor)), + position: Some(serialize_anchor(&trigger_anchor)), }; let response = client .request(request) @@ -106,7 +106,7 @@ pub fn go_to_parent_module( .context("go to parent module via collab")? 
} else { let buffer_snapshot = buffer.read_with(cx, |buffer, _| buffer.snapshot()); - let position = trigger_anchor.text_anchor.to_point_utf16(&buffer_snapshot); + let position = trigger_anchor.to_point_utf16(&buffer_snapshot); project .update(cx, |project, cx| { project.request_lsp( @@ -168,7 +168,7 @@ pub fn expand_macro_recursively( let request = proto::LspExtExpandMacro { project_id, buffer_id: buffer_id.to_proto(), - position: Some(serialize_anchor(&trigger_anchor.text_anchor)), + position: Some(serialize_anchor(&trigger_anchor)), }; let response = client .request(request) @@ -180,7 +180,7 @@ pub fn expand_macro_recursively( } } else { let buffer_snapshot = buffer.read_with(cx, |buffer, _| buffer.snapshot()); - let position = trigger_anchor.text_anchor.to_point_utf16(&buffer_snapshot); + let position = trigger_anchor.to_point_utf16(&buffer_snapshot); project .update(cx, |project, cx| { project.request_lsp( @@ -195,10 +195,7 @@ pub fn expand_macro_recursively( }; if macro_expansion.is_empty() { - log::info!( - "Empty macro expansion for position {:?}", - trigger_anchor.text_anchor - ); + log::info!("Empty macro expansion for position {:?}", trigger_anchor); return Ok(()); } @@ -260,7 +257,7 @@ pub fn open_docs(editor: &mut Editor, _: &OpenDocs, window: &mut Window, cx: &mu let request = proto::LspExtOpenDocs { project_id, buffer_id: buffer_id.to_proto(), - position: Some(serialize_anchor(&trigger_anchor.text_anchor)), + position: Some(serialize_anchor(&trigger_anchor)), }; let response = client .request(request) @@ -272,7 +269,7 @@ pub fn open_docs(editor: &mut Editor, _: &OpenDocs, window: &mut Window, cx: &mu } } else { let buffer_snapshot = buffer.read_with(cx, |buffer, _| buffer.snapshot()); - let position = trigger_anchor.text_anchor.to_point_utf16(&buffer_snapshot); + let position = trigger_anchor.to_point_utf16(&buffer_snapshot); project .update(cx, |project, cx| { project.request_lsp( @@ -287,10 +284,7 @@ pub fn open_docs(editor: &mut Editor, _: 
&OpenDocs, window: &mut Window, cx: &mu }; if docs_urls.is_empty() { - log::debug!( - "Empty docs urls for position {:?}", - trigger_anchor.text_anchor - ); + log::debug!("Empty docs urls for position {:?}", trigger_anchor); return Ok(()); } @@ -322,16 +316,18 @@ fn cancel_flycheck_action( let Some(project) = &editor.project else { return; }; + let multibuffer_snapshot = editor + .buffer + .read_with(cx, |buffer, cx| buffer.snapshot(cx)); let buffer_id = editor .selections .disjoint_anchors_arc() .iter() .find_map(|selection| { - let buffer_id = selection - .start - .text_anchor - .buffer_id - .or(selection.end.text_anchor.buffer_id)?; + let buffer_id = multibuffer_snapshot + .anchor_to_buffer_anchor(selection.start)? + .0 + .buffer_id; let project = project.read(cx); let entry_id = project .buffer_for_id(buffer_id, cx)? @@ -351,16 +347,18 @@ fn run_flycheck_action( let Some(project) = &editor.project else { return; }; + let multibuffer_snapshot = editor + .buffer + .read_with(cx, |buffer, cx| buffer.snapshot(cx)); let buffer_id = editor .selections .disjoint_anchors_arc() .iter() .find_map(|selection| { - let buffer_id = selection - .start - .text_anchor - .buffer_id - .or(selection.end.text_anchor.buffer_id)?; + let buffer_id = multibuffer_snapshot + .anchor_to_buffer_anchor(selection.head())? + .0 + .buffer_id; let project = project.read(cx); let entry_id = project .buffer_for_id(buffer_id, cx)? @@ -380,16 +378,18 @@ fn clear_flycheck_action( let Some(project) = &editor.project else { return; }; + let multibuffer_snapshot = editor + .buffer + .read_with(cx, |buffer, cx| buffer.snapshot(cx)); let buffer_id = editor .selections .disjoint_anchors_arc() .iter() .find_map(|selection| { - let buffer_id = selection - .start - .text_anchor - .buffer_id - .or(selection.end.text_anchor.buffer_id)?; + let buffer_id = multibuffer_snapshot + .anchor_to_buffer_anchor(selection.head())? 
+ .0 + .buffer_id; let project = project.read(cx); let entry_id = project .buffer_for_id(buffer_id, cx)? diff --git a/crates/editor/src/scroll.rs b/crates/editor/src/scroll.rs index c2280e90f7d30d53c0818119df70b7c32161b78b..42b865b17ca4e241b8f0728488cacd42d52d257c 100644 --- a/crates/editor/src/scroll.rs +++ b/crates/editor/src/scroll.rs @@ -44,13 +44,13 @@ impl ScrollAnchor { pub(super) fn new() -> Self { Self { offset: gpui::Point::default(), - anchor: Anchor::min(), + anchor: Anchor::Min, } } pub fn scroll_position(&self, snapshot: &DisplaySnapshot) -> gpui::Point { self.offset.apply_along(Axis::Vertical, |offset| { - if self.anchor == Anchor::min() { + if self.anchor == Anchor::Min { 0. } else { let scroll_top = self.anchor.to_display_point(snapshot).row().as_f64(); diff --git a/crates/editor/src/scroll/actions.rs b/crates/editor/src/scroll/actions.rs index 3d22db2a4dc3c9339e51b0dae02d6d598400ad64..48438b6592a3a75c405fee496fbbd55091389a8f 100644 --- a/crates/editor/src/scroll/actions.rs +++ b/crates/editor/src/scroll/actions.rs @@ -78,7 +78,7 @@ impl Editor { let selection_head = self.selections.newest_display(&display_snapshot).head(); let sticky_headers_len = if EditorSettings::get_global(cx).sticky_scroll.enabled - && let Some((_, _, buffer_snapshot)) = display_snapshot.buffer_snapshot().as_singleton() + && let Some(buffer_snapshot) = display_snapshot.buffer_snapshot().as_singleton() { let select_head_point = rope::Point::new(selection_head.to_point(&display_snapshot).row, 0); diff --git a/crates/editor/src/selections_collection.rs b/crates/editor/src/selections_collection.rs index 7331205d22b779b17af2186757a6b96f59b5616c..51dcca149ce597df076a083f7d0bc3ad223edae2 100644 --- a/crates/editor/src/selections_collection.rs +++ b/crates/editor/src/selections_collection.rs @@ -4,7 +4,6 @@ use std::{ sync::Arc, }; -use collections::HashMap; use gpui::Pixels; use itertools::Itertools as _; use language::{Bias, Point, PointUtf16, Selection, SelectionGoal}; @@ -12,7 
+11,7 @@ use multi_buffer::{MultiBufferDimension, MultiBufferOffset}; use util::post_inc; use crate::{ - Anchor, DisplayPoint, DisplayRow, ExcerptId, MultiBufferSnapshot, SelectMode, ToOffset, + Anchor, DisplayPoint, DisplayRow, MultiBufferSnapshot, SelectMode, ToOffset, display_map::{DisplaySnapshot, ToDisplayPoint}, movement::TextLayoutDetails, }; @@ -45,8 +44,8 @@ impl SelectionsCollection { pending: Some(PendingSelection { selection: Selection { id: 0, - start: Anchor::min(), - end: Anchor::min(), + start: Anchor::Min, + end: Anchor::Min, reversed: false, goal: SelectionGoal::None, }, @@ -547,13 +546,11 @@ impl SelectionsCollection { ); assert!( snapshot.can_resolve(&selection.start), - "disjoint selection start is not resolvable for the given snapshot:\n{selection:?}, {excerpt:?}", - excerpt = snapshot.buffer_for_excerpt(selection.start.excerpt_id).map(|snapshot| snapshot.remote_id()), + "disjoint selection start is not resolvable for the given snapshot:\n{selection:?}", ); assert!( snapshot.can_resolve(&selection.end), - "disjoint selection end is not resolvable for the given snapshot: {selection:?}, {excerpt:?}", - excerpt = snapshot.buffer_for_excerpt(selection.end.excerpt_id).map(|snapshot| snapshot.remote_id()), + "disjoint selection end is not resolvable for the given snapshot:\n{selection:?}", ); }); assert!( @@ -572,17 +569,11 @@ impl SelectionsCollection { ); assert!( snapshot.can_resolve(&selection.start), - "pending selection start is not resolvable for the given snapshot: {pending:?}, {excerpt:?}", - excerpt = snapshot .buffer_for_excerpt(selection.start.excerpt_id) .map(|snapshot| snapshot.remote_id()), + "pending selection start is not resolvable for the given snapshot: {pending:?}", ); assert!( snapshot.can_resolve(&selection.end), - "pending selection end is not resolvable for the given snapshot: {pending:?}, {excerpt:?}", - excerpt = snapshot .buffer_for_excerpt(selection.end.excerpt_id) .map(|snapshot| snapshot.remote_id()), + 
"pending selection end is not resolvable for the given snapshot: {pending:?}", ); } } @@ -665,10 +656,10 @@ impl<'snap, 'a> MutableSelectionsCollection<'snap, 'a> { self.disjoint .iter() .filter(|selection| { - if let Some(selection_buffer_id) = - self.snapshot.buffer_id_for_anchor(selection.start) + if let Some((selection_buffer_anchor, _)) = + self.snapshot.anchor_to_buffer_anchor(selection.start) { - let should_remove = selection_buffer_id == buffer_id; + let should_remove = selection_buffer_anchor.buffer_id == buffer_id; changed |= should_remove; !should_remove } else { @@ -683,10 +674,8 @@ impl<'snap, 'a> MutableSelectionsCollection<'snap, 'a> { let buffer_snapshot = self.snapshot.buffer_snapshot(); let anchor = buffer_snapshot .excerpts() - .find(|(_, buffer, _)| buffer.remote_id() == buffer_id) - .and_then(|(excerpt_id, _, range)| { - buffer_snapshot.anchor_in_excerpt(excerpt_id, range.context.start) - }) + .find(|excerpt| excerpt.context.start.buffer_id == buffer_id) + .and_then(|excerpt| buffer_snapshot.anchor_in_excerpt(excerpt.context.start)) .unwrap_or_else(|| self.snapshot.anchor_before(MultiBufferOffset(0))); self.collection.disjoint = Arc::from([Selection { id: post_inc(&mut self.collection.next_selection_id), @@ -1077,80 +1066,6 @@ impl<'snap, 'a> MutableSelectionsCollection<'snap, 'a> { self.selections_changed = true; self.pending.as_mut().map(|pending| &mut pending.selection) } - - /// Compute new ranges for any selections that were located in excerpts that have - /// since been removed. - /// - /// Returns a `HashMap` indicating which selections whose former head position - /// was no longer present. The keys of the map are selection ids. The values are - /// the id of the new excerpt where the head of the selection has been moved. 
- pub fn refresh(&mut self) -> HashMap { - let mut pending = self.collection.pending.take(); - let mut selections_with_lost_position = HashMap::default(); - - let anchors_with_status = { - let disjoint_anchors = self - .disjoint - .iter() - .flat_map(|selection| [&selection.start, &selection.end]); - self.snapshot.refresh_anchors(disjoint_anchors) - }; - let adjusted_disjoint: Vec<_> = anchors_with_status - .chunks(2) - .map(|selection_anchors| { - let (anchor_ix, start, kept_start) = selection_anchors[0]; - let (_, end, kept_end) = selection_anchors[1]; - let selection = &self.disjoint[anchor_ix / 2]; - let kept_head = if selection.reversed { - kept_start - } else { - kept_end - }; - if !kept_head { - selections_with_lost_position.insert(selection.id, selection.head().excerpt_id); - } - - Selection { - id: selection.id, - start, - end, - reversed: selection.reversed, - goal: selection.goal, - } - }) - .collect(); - - if !adjusted_disjoint.is_empty() { - let map = self.display_snapshot(); - let resolved_selections = - resolve_selections_wrapping_blocks(adjusted_disjoint.iter(), &map).collect(); - self.select::(resolved_selections); - } - - if let Some(pending) = pending.as_mut() { - let anchors = self - .snapshot - .refresh_anchors([&pending.selection.start, &pending.selection.end]); - let (_, start, kept_start) = anchors[0]; - let (_, end, kept_end) = anchors[1]; - let kept_head = if pending.selection.reversed { - kept_start - } else { - kept_end - }; - if !kept_head { - selections_with_lost_position - .insert(pending.selection.id, pending.selection.head().excerpt_id); - } - - pending.selection.start = start; - pending.selection.end = end; - } - self.collection.pending = pending; - self.selections_changed = true; - - selections_with_lost_position - } } impl Deref for MutableSelectionsCollection<'_, '_> { diff --git a/crates/editor/src/semantic_tokens.rs b/crates/editor/src/semantic_tokens.rs index 
8408438f17533098f906c75bcc03983edfb7acf8..5e78be70d5627bd4f484a3efd44b13519b31b400 100644 --- a/crates/editor/src/semantic_tokens.rs +++ b/crates/editor/src/semantic_tokens.rs @@ -148,9 +148,9 @@ impl Editor { }; let buffers_to_query = self - .visible_excerpts(true, cx) - .into_values() - .map(|(buffer, ..)| buffer) + .visible_buffers(cx) + .into_iter() + .filter(|buffer| self.is_lsp_relevant(buffer.read(cx).file(), cx)) .chain(buffer_id.and_then(|buffer_id| self.buffer.read(cx).buffer(buffer_id))) .filter_map(|editor_buffer| { let editor_buffer_id = editor_buffer.read(cx).remote_id(); @@ -365,11 +365,20 @@ fn convert_token( modifiers: u32, ) -> Option { let rules = stylizer.rules_for_token(token_type)?; - let matching = rules.iter().filter(|rule| { - rule.token_modifiers - .iter() - .all(|m| stylizer.has_modifier(modifiers, m)) - }); + let matching: Vec<_> = rules + .iter() + .filter(|rule| { + rule.token_modifiers + .iter() + .all(|m| stylizer.has_modifier(modifiers, m)) + }) + .collect(); + + if let Some(rule) = matching.last() { + if rule.no_style_defined() { + return None; + } + } let mut highlight = HighlightStyle::default(); let mut empty = true; @@ -463,7 +472,9 @@ mod tests { }; use futures::StreamExt as _; - use gpui::{AppContext as _, Entity, Focusable as _, HighlightStyle, TestAppContext}; + use gpui::{ + AppContext as _, Entity, Focusable as _, HighlightStyle, TestAppContext, UpdateGlobal as _, + }; use language::{Language, LanguageConfig, LanguageMatcher}; use languages::FakeLspAdapter; use multi_buffer::{ @@ -472,7 +483,10 @@ mod tests { use project::Project; use rope::Point; use serde_json::json; - use settings::{LanguageSettingsContent, SemanticTokenRules, SemanticTokens, SettingsStore}; + use settings::{ + GlobalLspSettingsContent, LanguageSettingsContent, SemanticTokenRule, SemanticTokenRules, + SemanticTokens, SettingsStore, + }; use workspace::{MultiWorkspace, WorkspaceHandle as _}; use crate::{ @@ -1214,11 +1228,19 @@ mod tests { ); // Get the 
excerpt id for the TOML excerpt and expand it down by 2 lines. - let toml_excerpt_id = - editor.read_with(cx, |editor, cx| editor.buffer().read(cx).excerpt_ids()[0]); + let toml_anchor = editor.read_with(cx, |editor, cx| { + editor + .buffer() + .read(cx) + .snapshot(cx) + .anchor_in_excerpt(text::Anchor::min_for_buffer( + toml_buffer.read(cx).remote_id(), + )) + .unwrap() + }); editor.update_in(cx, |editor, _, cx| { editor.buffer().update(cx, |buffer, cx| { - buffer.expand_excerpts([toml_excerpt_id], 2, ExpandExcerptDirection::Down, cx); + buffer.expand_excerpts([toml_anchor], 2, ExpandExcerptDirection::Down, cx); }); }); @@ -1816,6 +1838,256 @@ mod tests { ); } + #[gpui::test] + async fn test_semantic_token_disabling_with_empty_rule(cx: &mut TestAppContext) { + init_test(cx, |_| {}); + update_test_language_settings(cx, &|s| { + s.languages.0.insert( + "Rust".into(), + LanguageSettingsContent { + semantic_tokens: Some(SemanticTokens::Full), + ..Default::default() + }, + ); + }); + + let mut cx = EditorLspTestContext::new_rust( + lsp::ServerCapabilities { + semantic_tokens_provider: Some( + lsp::SemanticTokensServerCapabilities::SemanticTokensOptions( + lsp::SemanticTokensOptions { + legend: lsp::SemanticTokensLegend { + token_types: vec!["function".into()], + token_modifiers: vec![], + }, + full: Some(lsp::SemanticTokensFullOptions::Delta { delta: None }), + ..Default::default() + }, + ), + ), + ..Default::default() + }, + cx, + ) + .await; + + let mut full_request = cx + .set_request_handler::( + move |_, _, _| async move { + Ok(Some(lsp::SemanticTokensResult::Tokens( + lsp::SemanticTokens { + data: vec![0, 3, 4, 0, 0], + result_id: None, + }, + ))) + }, + ); + + // Verify it highlights by default + cx.set_state("ˇfn main() {}"); + full_request.next().await; + cx.run_until_parked(); + assert_eq!(extract_semantic_highlights(&cx.editor, &cx).len(), 1); + + // Apply EMPTY rule to disable it + cx.update(|_, cx| { + SettingsStore::update_global(cx, |store, cx| { + 
store.update_user_settings(cx, |settings| { + settings.global_lsp_settings = Some(GlobalLspSettingsContent { + semantic_token_rules: Some(SemanticTokenRules { + rules: vec![SemanticTokenRule { + token_type: Some("function".to_string()), + ..Default::default() + }], + }), + ..Default::default() + }); + }); + }); + }); + + cx.set_state("ˇfn main() { }"); + full_request.next().await; + cx.run_until_parked(); + + assert!( + extract_semantic_highlights(&cx.editor, &cx).is_empty(), + "Highlighting should be disabled by empty style setting" + ); + } + + #[gpui::test] + async fn test_semantic_token_broad_rule_disables_specific_token(cx: &mut TestAppContext) { + init_test(cx, |_| {}); + update_test_language_settings(cx, &|s| { + s.languages.0.insert( + "Rust".into(), + LanguageSettingsContent { + semantic_tokens: Some(SemanticTokens::Full), + ..Default::default() + }, + ); + }); + + let mut cx = EditorLspTestContext::new_rust( + lsp::ServerCapabilities { + semantic_tokens_provider: Some( + lsp::SemanticTokensServerCapabilities::SemanticTokensOptions( + lsp::SemanticTokensOptions { + legend: lsp::SemanticTokensLegend { + token_types: vec!["comment".into()], + token_modifiers: vec!["documentation".into()], + }, + full: Some(lsp::SemanticTokensFullOptions::Delta { delta: None }), + ..Default::default() + }, + ), + ), + ..Default::default() + }, + cx, + ) + .await; + + let mut full_request = cx + .set_request_handler::( + move |_, _, _| async move { + Ok(Some(lsp::SemanticTokensResult::Tokens( + lsp::SemanticTokens { + data: vec![0, 0, 5, 0, 1], // comment [documentation] + result_id: None, + }, + ))) + }, + ); + + cx.set_state("ˇ/// d\n"); + full_request.next().await; + cx.run_until_parked(); + assert_eq!( + extract_semantic_highlights(&cx.editor, &cx).len(), + 1, + "Documentation comment should be highlighted" + ); + + // Apply a BROAD empty rule for "comment" (no modifiers) + cx.update(|_, cx| { + SettingsStore::update_global(cx, |store, cx| { + 
store.update_user_settings(cx, |settings| { + settings.global_lsp_settings = Some(GlobalLspSettingsContent { + semantic_token_rules: Some(SemanticTokenRules { + rules: vec![SemanticTokenRule { + token_type: Some("comment".to_string()), + ..Default::default() + }], + }), + ..Default::default() + }); + }); + }); + }); + + cx.set_state("ˇ/// d\n"); + full_request.next().await; + cx.run_until_parked(); + + assert!( + extract_semantic_highlights(&cx.editor, &cx).is_empty(), + "Broad empty rule should disable specific documentation comment" + ); + } + + #[gpui::test] + async fn test_semantic_token_specific_rule_does_not_disable_broad_token( + cx: &mut TestAppContext, + ) { + use gpui::UpdateGlobal as _; + use settings::{GlobalLspSettingsContent, SemanticTokenRule}; + + init_test(cx, |_| {}); + update_test_language_settings(cx, &|s| { + s.languages.0.insert( + "Rust".into(), + LanguageSettingsContent { + semantic_tokens: Some(SemanticTokens::Full), + ..Default::default() + }, + ); + }); + + let mut cx = EditorLspTestContext::new_rust( + lsp::ServerCapabilities { + semantic_tokens_provider: Some( + lsp::SemanticTokensServerCapabilities::SemanticTokensOptions( + lsp::SemanticTokensOptions { + legend: lsp::SemanticTokensLegend { + token_types: vec!["comment".into()], + token_modifiers: vec!["documentation".into()], + }, + full: Some(lsp::SemanticTokensFullOptions::Delta { delta: None }), + ..Default::default() + }, + ), + ), + ..Default::default() + }, + cx, + ) + .await; + + let mut full_request = cx + .set_request_handler::( + move |_, _, _| async move { + Ok(Some(lsp::SemanticTokensResult::Tokens( + lsp::SemanticTokens { + data: vec![ + 0, 0, 5, 0, 1, // comment [documentation] + 1, 0, 5, 0, 0, // normal comment + ], + result_id: None, + }, + ))) + }, + ); + + cx.set_state("ˇ/// d\n// n\n"); + full_request.next().await; + cx.run_until_parked(); + assert_eq!( + extract_semantic_highlights(&cx.editor, &cx).len(), + 2, + "Both documentation and normal comments should be 
highlighted initially" + ); + + // Apply a SPECIFIC empty rule for documentation only + cx.update(|_, cx| { + SettingsStore::update_global(cx, |store, cx| { + store.update_user_settings(cx, |settings| { + settings.global_lsp_settings = Some(GlobalLspSettingsContent { + semantic_token_rules: Some(SemanticTokenRules { + rules: vec![SemanticTokenRule { + token_type: Some("comment".to_string()), + token_modifiers: vec!["documentation".to_string()], + ..Default::default() + }], + }), + ..Default::default() + }); + }); + }); + }); + + cx.set_state("ˇ/// d\n// n\n"); + full_request.next().await; + cx.run_until_parked(); + + assert_eq!( + extract_semantic_highlights(&cx.editor, &cx).len(), + 1, + "Normal comment should still be highlighted (matched by default rule)" + ); + } + fn extract_semantic_highlight_styles( editor: &Entity, cx: &TestAppContext, diff --git a/crates/editor/src/split.rs b/crates/editor/src/split.rs index cdb016ea4b612aaae288acd008f745ef2ecf0f1d..ee15583072144ca170328988ebec9959b391dbf1 100644 --- a/crates/editor/src/split.rs +++ b/crates/editor/src/split.rs @@ -1,5 +1,5 @@ use std::{ - ops::{Bound, Range, RangeInclusive}, + ops::{Range, RangeInclusive}, sync::Arc, }; @@ -13,7 +13,7 @@ use gpui::{ use itertools::Itertools; use language::{Buffer, Capability, HighlightedText}; use multi_buffer::{ - Anchor, BufferOffset, ExcerptId, ExcerptRange, ExpandExcerptDirection, MultiBuffer, + Anchor, AnchorRangeExt as _, BufferOffset, ExcerptRange, ExpandExcerptDirection, MultiBuffer, MultiBufferDiffHunk, MultiBufferPoint, MultiBufferSnapshot, PathKey, }; use project::Project; @@ -44,13 +44,11 @@ use crate::{ use zed_actions::assistant::InlineAssist; pub(crate) fn convert_lhs_rows_to_rhs( - lhs_excerpt_to_rhs_excerpt: &HashMap, rhs_snapshot: &MultiBufferSnapshot, lhs_snapshot: &MultiBufferSnapshot, - lhs_bounds: (Bound, Bound), + lhs_bounds: Range, ) -> Vec { patches_for_range( - lhs_excerpt_to_rhs_excerpt, lhs_snapshot, rhs_snapshot, lhs_bounds, @@ -59,13 +57,11 
@@ pub(crate) fn convert_lhs_rows_to_rhs( } pub(crate) fn convert_rhs_rows_to_lhs( - rhs_excerpt_to_lhs_excerpt: &HashMap, lhs_snapshot: &MultiBufferSnapshot, rhs_snapshot: &MultiBufferSnapshot, - rhs_bounds: (Bound, Bound), + rhs_bounds: Range, ) -> Vec { patches_for_range( - rhs_excerpt_to_lhs_excerpt, rhs_snapshot, lhs_snapshot, rhs_bounds, @@ -73,6 +69,21 @@ pub(crate) fn convert_rhs_rows_to_lhs( ) } +fn rhs_range_to_base_text_range( + rhs_range: &Range, + diff_snapshot: &BufferDiffSnapshot, + rhs_buffer_snapshot: &text::BufferSnapshot, +) -> Range { + let start = diff_snapshot + .buffer_point_to_base_text_range(Point::new(rhs_range.start.row, 0), rhs_buffer_snapshot) + .start; + let end = diff_snapshot + .buffer_point_to_base_text_range(Point::new(rhs_range.end.row, 0), rhs_buffer_snapshot) + .end; + let end_column = diff_snapshot.base_text().line_len(end.row); + Point::new(start.row, 0)..Point::new(end.row, end_column) +} + fn translate_lhs_selections_to_rhs( selections_by_buffer: &HashMap>, Option)>, splittable: &SplittableEditor, @@ -168,22 +179,18 @@ fn translate_lhs_hunks_to_rhs( } fn patches_for_range( - excerpt_map: &HashMap, source_snapshot: &MultiBufferSnapshot, target_snapshot: &MultiBufferSnapshot, - source_bounds: (Bound, Bound), + source_bounds: Range, translate_fn: F, ) -> Vec where F: Fn(&BufferDiffSnapshot, RangeInclusive, &text::BufferSnapshot) -> Patch, { - struct PendingExcerpt<'a> { - source_excerpt_id: ExcerptId, - target_excerpt_id: ExcerptId, - source_buffer: &'a text::BufferSnapshot, - target_buffer: &'a text::BufferSnapshot, + struct PendingExcerpt { + source_buffer_snapshot: language::BufferSnapshot, + source_excerpt_range: ExcerptRange, buffer_point_range: Range, - source_context_range: Range, } let mut result = Vec::new(); @@ -201,41 +208,55 @@ where }; let diff = source_snapshot - .diff_for_buffer_id(first.source_buffer.remote_id()) + .diff_for_buffer_id(first.source_buffer_snapshot.remote_id()) .expect("buffer with no diff when 
creating patches"); - let rhs_buffer = if first.source_buffer.remote_id() == diff.base_text().remote_id() { - first.target_buffer + let source_is_lhs = + first.source_buffer_snapshot.remote_id() == diff.base_text().remote_id(); + let target_buffer_id = if source_is_lhs { + diff.buffer_id() } else { - first.source_buffer + diff.base_text().remote_id() + }; + let target_buffer = target_snapshot + .buffer_for_id(target_buffer_id) + .expect("missing corresponding buffer"); + let rhs_buffer = if source_is_lhs { + target_buffer + } else { + &first.source_buffer_snapshot }; let patch = translate_fn(diff, union_start..=union_end, rhs_buffer); for excerpt in pending.drain(..) { + let target_position = patch.old_to_new(excerpt.buffer_point_range.start); + let target_position = target_buffer.anchor_before(target_position); + let Some(target_position) = target_snapshot.anchor_in_excerpt(target_position) else { + continue; + }; + let Some((target_buffer_snapshot, target_excerpt_range)) = + target_snapshot.excerpt_containing(target_position..target_position) + else { + continue; + }; + result.push(patch_for_excerpt( source_snapshot, target_snapshot, - excerpt.source_excerpt_id, - excerpt.target_excerpt_id, - excerpt.target_buffer, - excerpt.source_context_range, + &excerpt.source_buffer_snapshot, + target_buffer_snapshot, + excerpt.source_excerpt_range, + target_excerpt_range, &patch, excerpt.buffer_point_range, )); } }; - for (source_buffer, buffer_offset_range, source_excerpt_id, source_context_range) in - source_snapshot.range_to_buffer_ranges_with_context(source_bounds) + for (buffer_snapshot, source_range, source_excerpt_range) in + source_snapshot.range_to_buffer_ranges(source_bounds) { - let Some(target_excerpt_id) = excerpt_map.get(&source_excerpt_id).copied() else { - continue; - }; - let Some(target_buffer) = target_snapshot.buffer_for_excerpt(target_excerpt_id) else { - continue; - }; - - let buffer_id = source_buffer.remote_id(); + let buffer_id = 
buffer_snapshot.remote_id(); if current_buffer_id != Some(buffer_id) { if let (Some(start), Some(end)) = (union_context_start.take(), union_context_end.take()) @@ -245,8 +266,8 @@ where current_buffer_id = Some(buffer_id); } - let buffer_point_range = buffer_offset_range.to_point(source_buffer); - let source_context_range = source_context_range.to_point(source_buffer); + let buffer_point_range = source_range.to_point(&buffer_snapshot); + let source_context_range = source_excerpt_range.context.to_point(&buffer_snapshot); union_context_start = Some(union_context_start.map_or(source_context_range.start, |s| { s.min(source_context_range.start) @@ -256,12 +277,9 @@ where })); pending_excerpts.push(PendingExcerpt { - source_excerpt_id, - target_excerpt_id, - source_buffer, - target_buffer, + source_buffer_snapshot: buffer_snapshot, + source_excerpt_range, buffer_point_range, - source_context_range, }); } @@ -275,55 +293,60 @@ where fn patch_for_excerpt( source_snapshot: &MultiBufferSnapshot, target_snapshot: &MultiBufferSnapshot, - source_excerpt_id: ExcerptId, - target_excerpt_id: ExcerptId, - target_buffer: &text::BufferSnapshot, - source_context_range: Range, + source_buffer_snapshot: &language::BufferSnapshot, + target_buffer_snapshot: &language::BufferSnapshot, + source_excerpt_range: ExcerptRange, + target_excerpt_range: ExcerptRange, patch: &Patch, source_edited_range: Range, ) -> CompanionExcerptPatch { - let source_multibuffer_range = source_snapshot - .range_for_excerpt(source_excerpt_id) - .expect("no excerpt for source id when creating patch"); - let source_excerpt_start_in_multibuffer = source_multibuffer_range.start; - let source_excerpt_start_in_buffer = source_context_range.start; - let source_excerpt_end_in_buffer = source_context_range.end; - let target_multibuffer_range = target_snapshot - .range_for_excerpt(target_excerpt_id) - .expect("no excerpt for target id when creating patch"); - let target_excerpt_start_in_multibuffer = 
target_multibuffer_range.start; - let target_context_range = target_snapshot - .context_range_for_excerpt(target_excerpt_id) - .expect("no range for target id when creating patch"); - let target_excerpt_start_in_buffer = target_context_range.start.to_point(&target_buffer); - let target_excerpt_end_in_buffer = target_context_range.end.to_point(&target_buffer); + let source_buffer_range = source_excerpt_range + .context + .to_point(source_buffer_snapshot); + let source_multibuffer_range = (source_snapshot + .anchor_in_buffer(source_excerpt_range.context.start) + .expect("buffer should exist in multibuffer") + ..source_snapshot + .anchor_in_buffer(source_excerpt_range.context.end) + .expect("buffer should exist in multibuffer")) + .to_point(source_snapshot); + let target_buffer_range = target_excerpt_range + .context + .to_point(target_buffer_snapshot); + let target_multibuffer_range = (target_snapshot + .anchor_in_buffer(target_excerpt_range.context.start) + .expect("buffer should exist in multibuffer") + ..target_snapshot + .anchor_in_buffer(target_excerpt_range.context.end) + .expect("buffer should exist in multibuffer")) + .to_point(target_snapshot); let edits = patch .edits() .iter() - .skip_while(|edit| edit.old.end < source_excerpt_start_in_buffer) - .take_while(|edit| edit.old.start <= source_excerpt_end_in_buffer) + .skip_while(|edit| edit.old.end < source_buffer_range.start) + .take_while(|edit| edit.old.start <= source_buffer_range.end) .map(|edit| { - let clamped_source_start = edit.old.start.max(source_excerpt_start_in_buffer); - let clamped_source_end = edit.old.end.min(source_excerpt_end_in_buffer); - let source_multibuffer_start = source_excerpt_start_in_multibuffer - + (clamped_source_start - source_excerpt_start_in_buffer); - let source_multibuffer_end = source_excerpt_start_in_multibuffer - + (clamped_source_end - source_excerpt_start_in_buffer); + let clamped_source_start = edit.old.start.max(source_buffer_range.start); + let clamped_source_end = 
edit.old.end.min(source_buffer_range.end); + let source_multibuffer_start = + source_multibuffer_range.start + (clamped_source_start - source_buffer_range.start); + let source_multibuffer_end = + source_multibuffer_range.start + (clamped_source_end - source_buffer_range.start); let clamped_target_start = edit .new .start - .max(target_excerpt_start_in_buffer) - .min(target_excerpt_end_in_buffer); + .max(target_buffer_range.start) + .min(target_buffer_range.end); let clamped_target_end = edit .new .end - .max(target_excerpt_start_in_buffer) - .min(target_excerpt_end_in_buffer); - let target_multibuffer_start = target_excerpt_start_in_multibuffer - + (clamped_target_start - target_excerpt_start_in_buffer); - let target_multibuffer_end = target_excerpt_start_in_multibuffer - + (clamped_target_end - target_excerpt_start_in_buffer); + .max(target_buffer_range.start) + .min(target_buffer_range.end); + let target_multibuffer_start = + target_multibuffer_range.start + (clamped_target_start - target_buffer_range.start); + let target_multibuffer_end = + target_multibuffer_range.start + (clamped_target_end - target_buffer_range.start); text::Edit { old: source_multibuffer_start..source_multibuffer_end, new: target_multibuffer_start..target_multibuffer_end, @@ -331,8 +354,8 @@ fn patch_for_excerpt( }); let edits = [text::Edit { - old: source_excerpt_start_in_multibuffer..source_excerpt_start_in_multibuffer, - new: target_excerpt_start_in_multibuffer..target_excerpt_start_in_multibuffer, + old: source_multibuffer_range.start..source_multibuffer_range.start, + new: target_multibuffer_range.start..target_multibuffer_range.start, }] .into_iter() .chain(edits); @@ -349,21 +372,20 @@ fn patch_for_excerpt( merged_edits.push(edit); } - let edited_range = source_excerpt_start_in_multibuffer - + (source_edited_range.start - source_excerpt_start_in_buffer) - ..source_excerpt_start_in_multibuffer - + (source_edited_range.end - source_excerpt_start_in_buffer); + let edited_range = 
source_multibuffer_range.start + + (source_edited_range.start - source_buffer_range.start) + ..source_multibuffer_range.start + (source_edited_range.end - source_buffer_range.start); - let source_excerpt_end = source_excerpt_start_in_multibuffer - + (source_excerpt_end_in_buffer - source_excerpt_start_in_buffer); - let target_excerpt_end = target_excerpt_start_in_multibuffer - + (target_excerpt_end_in_buffer - target_excerpt_start_in_buffer); + let source_excerpt_end = + source_multibuffer_range.start + (source_buffer_range.end - source_buffer_range.start); + let target_excerpt_end = + target_multibuffer_range.start + (target_buffer_range.end - target_buffer_range.start); CompanionExcerptPatch { patch: Patch::new(merged_edits), edited_range, - source_excerpt_range: source_excerpt_start_in_multibuffer..source_excerpt_end, - target_excerpt_range: target_excerpt_start_in_multibuffer..target_excerpt_end, + source_excerpt_range: source_multibuffer_range.start..source_excerpt_end, + target_excerpt_range: target_multibuffer_range.start..target_excerpt_end, } } @@ -390,6 +412,7 @@ pub struct SplittableEditor { struct LhsEditor { multibuffer: Entity, editor: Entity, + companion: Entity, was_last_focused: bool, _subscriptions: Vec, } @@ -470,11 +493,16 @@ impl SplittableEditor { &rhs_editor, |this, _, event: &EditorEvent, cx| match event { EditorEvent::ExpandExcerptsRequested { - excerpt_ids, + excerpt_anchors, lines, direction, } => { - this.expand_excerpts(excerpt_ids.iter().copied(), *lines, *direction, cx); + this.expand_excerpts( + excerpt_anchors.iter().copied(), + *lines, + *direction, + cx, + ); } _ => cx.emit(event.clone()), }, @@ -563,19 +591,31 @@ impl SplittableEditor { window, |this, _, event: &EditorEvent, window, cx| match event { EditorEvent::ExpandExcerptsRequested { - excerpt_ids, + excerpt_anchors, lines, direction, } => { - if this.lhs.is_some() { - let rhs_display_map = this.rhs_editor.read(cx).display_map.read(cx); - let rhs_ids: Vec<_> = excerpt_ids + 
if let Some(lhs) = &this.lhs { + let rhs_snapshot = this.rhs_multibuffer.read(cx).snapshot(cx); + let lhs_snapshot = lhs.multibuffer.read(cx).snapshot(cx); + let rhs_anchors = excerpt_anchors .iter() - .filter_map(|id| { - rhs_display_map.companion_excerpt_to_my_excerpt(*id, cx) + .filter_map(|anchor| { + let (anchor, lhs_buffer) = + lhs_snapshot.anchor_to_buffer_anchor(*anchor)?; + let rhs_buffer_id = + lhs.companion.read(cx).lhs_to_rhs_buffer(anchor.buffer_id)?; + let rhs_buffer = rhs_snapshot.buffer_for_id(rhs_buffer_id)?; + let diff = this.rhs_multibuffer.read(cx).diff_for(rhs_buffer_id)?; + let diff_snapshot = diff.read(cx).snapshot(cx); + let rhs_point = diff_snapshot.base_text_point_to_buffer_point( + anchor.to_point(&lhs_buffer), + &rhs_buffer, + ); + rhs_snapshot.anchor_in_excerpt(rhs_buffer.anchor_before(rhs_point)) }) - .collect(); - this.expand_excerpts(rhs_ids.into_iter(), *lines, *direction, cx); + .collect::>(); + this.expand_excerpts(rhs_anchors.into_iter(), *lines, *direction, cx); } } EditorEvent::StageOrUnstageRequested { stage, hunks } => { @@ -654,15 +694,23 @@ impl SplittableEditor { }), ); + let rhs_display_map = self.rhs_editor.read(cx).display_map.clone(); + let lhs_display_map = lhs_editor.read(cx).display_map.clone(); + let rhs_display_map_id = rhs_display_map.entity_id(); + let companion = cx.new(|_| { + Companion::new( + rhs_display_map_id, + convert_rhs_rows_to_lhs, + convert_lhs_rows_to_rhs, + ) + }); let lhs = LhsEditor { editor: lhs_editor, multibuffer: lhs_multibuffer, was_last_focused: false, + companion: companion.clone(), _subscriptions: subscriptions, }; - let rhs_display_map = self.rhs_editor.read(cx).display_map.clone(); - let lhs_display_map = lhs.editor.read(cx).display_map.clone(); - let rhs_display_map_id = rhs_display_map.entity_id(); self.rhs_editor.update(cx, |editor, cx| { editor.set_delegate_expand_excerpts(true); @@ -672,35 +720,21 @@ impl SplittableEditor { }) }); - let path_diffs: Vec<_> = { + let all_paths: 
Vec<_> = { let rhs_multibuffer = self.rhs_multibuffer.read(cx); - rhs_multibuffer - .paths() - .filter_map(|path| { - let excerpt_id = rhs_multibuffer.excerpts_for_path(path).next()?; - let snapshot = rhs_multibuffer.snapshot(cx); - let buffer = snapshot.buffer_for_excerpt(excerpt_id)?; + let rhs_multibuffer_snapshot = rhs_multibuffer.snapshot(cx); + rhs_multibuffer_snapshot + .buffers_with_paths() + .filter_map(|(buffer, path)| { let diff = rhs_multibuffer.diff_for(buffer.remote_id())?; Some((path.clone(), diff)) }) .collect() }; - let companion = cx.new(|_| { - Companion::new( - rhs_display_map_id, - convert_rhs_rows_to_lhs, - convert_lhs_rows_to_rhs, - ) - }); - self.lhs = Some(lhs); - let paths_for_sync: Vec<_> = path_diffs - .into_iter() - .map(|(path, diff)| (path, vec![], diff)) - .collect(); - self.sync_lhs_for_paths(paths_for_sync, &companion, cx); + self.sync_lhs_for_paths(all_paths, &companion, cx); rhs_display_map.update(cx, |dm, cx| { dm.set_companion(Some((lhs_display_map, companion.clone())), cx); @@ -1004,7 +1038,7 @@ impl SplittableEditor { cx.notify(); } - pub fn set_excerpts_for_path( + pub fn update_excerpts_for_path( &mut self, path: PathKey, buffer: Entity, @@ -1012,122 +1046,94 @@ impl SplittableEditor { context_line_count: u32, diff: Entity, cx: &mut Context, - ) -> (Vec>, bool) { + ) -> bool { + let has_ranges = ranges.clone().into_iter().next().is_some(); let Some(companion) = self.companion(cx) else { return self.rhs_multibuffer.update(cx, |rhs_multibuffer, cx| { - let (anchors, added_a_new_excerpt) = rhs_multibuffer.set_excerpts_for_path( + let added_a_new_excerpt = rhs_multibuffer.update_excerpts_for_path( path, buffer.clone(), ranges, context_line_count, cx, ); - if !anchors.is_empty() + if has_ranges && rhs_multibuffer .diff_for(buffer.read(cx).remote_id()) .is_none_or(|old_diff| old_diff.entity_id() != diff.entity_id()) { rhs_multibuffer.add_diff(diff, cx); } - (anchors, added_a_new_excerpt) + added_a_new_excerpt }); }; - let 
old_rhs_ids: Vec = self - .rhs_multibuffer - .read(cx) - .excerpts_for_path(&path) - .collect(); - let result = self.rhs_multibuffer.update(cx, |rhs_multibuffer, cx| { - let (anchors, added_a_new_excerpt) = rhs_multibuffer.set_excerpts_for_path( + let added_a_new_excerpt = rhs_multibuffer.update_excerpts_for_path( path.clone(), buffer.clone(), ranges, context_line_count, cx, ); - if !anchors.is_empty() + if has_ranges && rhs_multibuffer .diff_for(buffer.read(cx).remote_id()) .is_none_or(|old_diff| old_diff.entity_id() != diff.entity_id()) { rhs_multibuffer.add_diff(diff.clone(), cx); } - (anchors, added_a_new_excerpt) + added_a_new_excerpt }); - self.sync_lhs_for_paths(vec![(path, old_rhs_ids, diff)], &companion, cx); + self.sync_lhs_for_paths(vec![(path, diff)], &companion, cx); result } fn expand_excerpts( &mut self, - excerpt_ids: impl Iterator + Clone, + excerpt_anchors: impl Iterator + Clone, lines: u32, direction: ExpandExcerptDirection, cx: &mut Context, ) { let Some(companion) = self.companion(cx) else { self.rhs_multibuffer.update(cx, |rhs_multibuffer, cx| { - rhs_multibuffer.expand_excerpts(excerpt_ids, lines, direction, cx); + rhs_multibuffer.expand_excerpts(excerpt_anchors, lines, direction, cx); }); return; }; - let paths_with_old_ids: Vec<_> = self.rhs_multibuffer.update(cx, |rhs_multibuffer, cx| { + let paths: Vec<_> = self.rhs_multibuffer.update(cx, |rhs_multibuffer, cx| { let snapshot = rhs_multibuffer.snapshot(cx); - let paths = excerpt_ids + let paths = excerpt_anchors .clone() - .filter_map(|excerpt_id| { - let path = rhs_multibuffer.path_for_excerpt(excerpt_id)?; - let buffer = snapshot.buffer_for_excerpt(excerpt_id)?; - let diff = rhs_multibuffer.diff_for(buffer.remote_id())?; - Some((path, diff)) + .filter_map(|anchor| { + let (anchor, _) = snapshot.anchor_to_buffer_anchor(anchor)?; + let path = snapshot.path_for_buffer(anchor.buffer_id)?; + let diff = rhs_multibuffer.diff_for(anchor.buffer_id)?; + Some((path.clone(), diff)) }) .collect::>() 
.into_iter() - .map(|(path, diff)| { - let old_ids = rhs_multibuffer.excerpts_for_path(&path).collect(); - (path, old_ids, diff) - }) .collect(); - rhs_multibuffer.expand_excerpts(excerpt_ids, lines, direction, cx); + rhs_multibuffer.expand_excerpts(excerpt_anchors, lines, direction, cx); paths }); - self.sync_lhs_for_paths(paths_with_old_ids, &companion, cx); + self.sync_lhs_for_paths(paths, &companion, cx); } pub fn remove_excerpts_for_path(&mut self, path: PathKey, cx: &mut Context) { - let Some(lhs) = &self.lhs else { - self.rhs_multibuffer.update(cx, |rhs_multibuffer, cx| { - rhs_multibuffer.remove_excerpts_for_path(path, cx); - }); - return; - }; - - let rhs_excerpt_ids: Vec = self - .rhs_multibuffer - .read(cx) - .excerpts_for_path(&path) - .collect(); - let lhs_excerpt_ids: Vec = - lhs.multibuffer.read(cx).excerpts_for_path(&path).collect(); + self.rhs_multibuffer.update(cx, |rhs_multibuffer, cx| { + rhs_multibuffer.remove_excerpts(path.clone(), cx); + }); - let rhs_display_map = self.rhs_editor.read(cx).display_map.clone(); - if let Some(companion) = rhs_display_map.read(cx).companion().cloned() { - companion.update(cx, |c, _| { - c.remove_excerpt_mappings(lhs_excerpt_ids, rhs_excerpt_ids); + if let Some(lhs) = &self.lhs { + lhs.multibuffer.update(cx, |lhs_multibuffer, cx| { + lhs_multibuffer.remove_excerpts(path, cx); }); } - - self.rhs_multibuffer.update(cx, |rhs_multibuffer, cx| { - rhs_multibuffer.remove_excerpts_for_path(path.clone(), cx); - }); - lhs.multibuffer.update(cx, |lhs_multibuffer, cx| { - lhs_multibuffer.remove_excerpts_for_path(path, cx); - }); } fn search_token(&self) -> SearchToken { @@ -1151,122 +1157,95 @@ impl SplittableEditor { fn sync_lhs_for_paths( &self, - paths_with_old_rhs_ids: Vec<(PathKey, Vec, Entity)>, + paths: Vec<(PathKey, Entity)>, companion: &Entity, cx: &mut Context, ) { let Some(lhs) = &self.lhs else { return }; self.rhs_multibuffer.update(cx, |rhs_multibuffer, cx| { - for (path, old_rhs_ids, diff) in 
paths_with_old_rhs_ids { - let old_lhs_ids: Vec = - lhs.multibuffer.read(cx).excerpts_for_path(&path).collect(); - - companion.update(cx, |c, _| { - c.remove_excerpt_mappings(old_lhs_ids, old_rhs_ids); - }); - - let rhs_excerpt_ids: Vec = - rhs_multibuffer.excerpts_for_path(&path).collect(); - let Some(excerpt_id) = rhs_excerpt_ids.first().copied() else { + for (path, diff) in paths { + let main_buffer_id = diff.read(cx).buffer_id; + let Some(main_buffer) = rhs_multibuffer.buffer(diff.read(cx).buffer_id) else { lhs.multibuffer.update(cx, |lhs_multibuffer, lhs_cx| { - lhs_multibuffer.remove_excerpts_for_path(path, lhs_cx); + lhs_multibuffer.remove_excerpts(path, lhs_cx); }); continue; }; - let Some(main_buffer_snapshot) = rhs_multibuffer - .snapshot(cx) - .buffer_for_excerpt(excerpt_id) - .cloned() - else { - continue; - }; - let Some(main_buffer) = rhs_multibuffer.buffer(main_buffer_snapshot.remote_id()) - else { - continue; - }; + let main_buffer_snapshot = main_buffer.read(cx).snapshot(); let base_text_buffer = diff.read(cx).base_text_buffer().clone(); let diff_snapshot = diff.read(cx).snapshot(cx); let base_text_buffer_snapshot = base_text_buffer.read(cx).snapshot(); - let lhs_ranges: Vec> = rhs_multibuffer - .excerpts_for_buffer(main_buffer_snapshot.remote_id(), cx) + let mut paired_ranges: Vec<(Range, ExcerptRange)> = Vec::new(); + + let mut have_excerpt = false; + let mut did_merge = false; + let rhs_multibuffer_snapshot = rhs_multibuffer.snapshot(cx); + for info in rhs_multibuffer_snapshot.excerpts_for_buffer(main_buffer_id) { + have_excerpt = true; + let rhs_context = info.context.to_point(&main_buffer_snapshot); + let lhs_context = rhs_range_to_base_text_range( + &rhs_context, + &diff_snapshot, + &main_buffer_snapshot, + ); + + if let Some((prev_lhs_context, prev_rhs_range)) = paired_ranges.last_mut() + && prev_lhs_context.end >= lhs_context.start + { + did_merge = true; + prev_lhs_context.end = lhs_context.end; + prev_rhs_range.context.end = 
info.context.end; + continue; + } + + paired_ranges.push((lhs_context, info)); + } + + let (lhs_ranges, rhs_ranges): (Vec<_>, Vec<_>) = paired_ranges.into_iter().unzip(); + let lhs_ranges = lhs_ranges .into_iter() - .filter(|(id, _, _)| rhs_excerpt_ids.contains(id)) - .map(|(_, _, excerpt_range)| { - let to_base_text = |range: Range| { - let start = diff_snapshot - .buffer_point_to_base_text_range( - Point::new(range.start.row, 0), - &main_buffer_snapshot, - ) - .start; - let end = diff_snapshot - .buffer_point_to_base_text_range( - Point::new(range.end.row, 0), - &main_buffer_snapshot, - ) - .end; - let end_column = diff_snapshot.base_text().line_len(end.row); - Point::new(start.row, 0)..Point::new(end.row, end_column) - }; - let primary = excerpt_range.primary.to_point(&main_buffer_snapshot); - let context = excerpt_range.context.to_point(&main_buffer_snapshot); - ExcerptRange { - primary: to_base_text(primary), - context: to_base_text(context), - } + .map(|range| { + ExcerptRange::new(base_text_buffer_snapshot.anchor_range_outside(range)) }) - .collect(); + .collect::>(); - let groups = lhs.multibuffer.update(cx, |lhs_multibuffer, lhs_cx| { - let lhs_result = lhs_multibuffer.update_path_excerpts( - path, + lhs.multibuffer.update(cx, |lhs_multibuffer, lhs_cx| { + lhs_multibuffer.update_path_excerpts( + path.clone(), base_text_buffer, &base_text_buffer_snapshot, - lhs_ranges, + &lhs_ranges, lhs_cx, ); - if !lhs_result.excerpt_ids.is_empty() + if have_excerpt && lhs_multibuffer .diff_for(base_text_buffer_snapshot.remote_id()) .is_none_or(|old_diff| old_diff.entity_id() != diff.entity_id()) { - lhs_multibuffer.add_inverted_diff(diff.clone(), main_buffer, lhs_cx); - } - - let mut groups = Vec::new(); - for (lhs_id, chunk) in &lhs_result - .excerpt_ids - .iter() - .copied() - .zip(rhs_excerpt_ids) - .chunk_by(|(lhs_id, _)| *lhs_id) - { - groups.push((lhs_id, chunk.map(|(_, rhs_id)| rhs_id).collect::>())); + lhs_multibuffer.add_inverted_diff( + diff.clone(), + 
main_buffer.clone(), + lhs_cx, + ); } - groups }); - let pairs = groups - .into_iter() - .map(|(lhs_id, rhs_group)| { - let rhs_id = if rhs_group.len() == 1 { - rhs_group[0] - } else { - rhs_multibuffer.merge_excerpts(&rhs_group, cx) - }; - (lhs_id, rhs_id) - }) - .collect::>(); + if did_merge { + rhs_multibuffer.update_path_excerpts( + path, + main_buffer, + &main_buffer_snapshot, + &rhs_ranges, + cx, + ); + } let lhs_buffer_id = diff.read(cx).base_text(cx).remote_id(); let rhs_buffer_id = diff.read(cx).buffer_id; companion.update(cx, |c, _| { - for (lhs_id, rhs_id) in pairs { - c.add_excerpt_mapping(lhs_id, rhs_id); - } c.add_buffer_mapping(lhs_buffer_id, rhs_buffer_id); }); } @@ -1312,7 +1291,7 @@ impl SplittableEditor { use crate::display_map::DisplayRow; self.debug_print(cx); - self.check_excerpt_mapping_invariants(cx); + self.check_excerpt_invariants(quiesced, cx); let lhs = self.lhs.as_ref().unwrap(); @@ -1362,15 +1341,21 @@ impl SplittableEditor { let (lhs_point, rhs_point) = if lhs_hunk.row_range.is_empty() || rhs_hunk.row_range.is_empty() { + use multi_buffer::ToPoint as _; + let lhs_end = Point::new(lhs_hunk.row_range.end.0, 0); let rhs_end = Point::new(rhs_hunk.row_range.end.0, 0); - let lhs_exceeds = lhs_snapshot - .range_for_excerpt(lhs_hunk.excerpt_id) - .map_or(false, |range| lhs_end >= range.end); - let rhs_exceeds = rhs_snapshot - .range_for_excerpt(rhs_hunk.excerpt_id) - .map_or(false, |range| rhs_end >= range.end); + let lhs_excerpt_end = lhs_snapshot + .anchor_in_excerpt(lhs_hunk.excerpt_range.context.end) + .unwrap() + .to_point(&lhs_snapshot); + let lhs_exceeds = lhs_end >= lhs_excerpt_end; + let rhs_excerpt_end = rhs_snapshot + .anchor_in_excerpt(rhs_hunk.excerpt_range.context.end) + .unwrap() + .to_point(&rhs_snapshot); + let rhs_exceeds = rhs_end >= rhs_excerpt_end; if lhs_exceeds != rhs_exceeds { continue; } @@ -1664,109 +1649,53 @@ impl SplittableEditor { eprintln!(); } - fn check_excerpt_mapping_invariants(&self, cx: &gpui::App) { - use 
multi_buffer::{ExcerptId, PathKey}; - + fn check_excerpt_invariants(&self, quiesced: bool, cx: &gpui::App) { let lhs = self.lhs.as_ref().expect("should have lhs editor"); - let rhs_excerpt_ids = self.rhs_multibuffer.read(cx).excerpt_ids(); - let lhs_excerpt_ids = lhs.multibuffer.read(cx).excerpt_ids(); - assert_eq!( - rhs_excerpt_ids.len(), - lhs_excerpt_ids.len(), - "excerpt count mismatch: rhs has {}, lhs has {}", - rhs_excerpt_ids.len(), - lhs_excerpt_ids.len(), - ); - - let rhs_display_map = self.rhs_editor.read(cx).display_map.clone(); - let companion = rhs_display_map - .read(cx) - .companion() - .cloned() - .expect("should have companion"); - let (lhs_to_rhs, rhs_to_lhs) = { - let c = companion.read(cx); - let (l, r) = c.excerpt_mappings(); - (l.clone(), r.clone()) - }; - - assert_eq!( - lhs_to_rhs.len(), - rhs_to_lhs.len(), - "mapping size mismatch: lhs_to_rhs has {}, rhs_to_lhs has {}", - lhs_to_rhs.len(), - rhs_to_lhs.len(), - ); + let rhs_snapshot = self.rhs_multibuffer.read(cx).snapshot(cx); + let rhs_excerpts = rhs_snapshot.excerpts().collect::>(); + let lhs_snapshot = lhs.multibuffer.read(cx).snapshot(cx); + let lhs_excerpts = lhs_snapshot.excerpts().collect::>(); + assert_eq!(lhs_excerpts.len(), rhs_excerpts.len()); - for (&lhs_id, &rhs_id) in &lhs_to_rhs { - let reverse = rhs_to_lhs.get(&rhs_id); - assert_eq!( - reverse, - Some(&lhs_id), - "lhs_to_rhs maps {lhs_id:?} -> {rhs_id:?}, but rhs_to_lhs maps {rhs_id:?} -> {reverse:?}", - ); - } - for (&rhs_id, &lhs_id) in &rhs_to_lhs { - let reverse = lhs_to_rhs.get(&lhs_id); + for (lhs_excerpt, rhs_excerpt) in lhs_excerpts.into_iter().zip(rhs_excerpts) { assert_eq!( - reverse, - Some(&rhs_id), - "rhs_to_lhs maps {rhs_id:?} -> {lhs_id:?}, but lhs_to_rhs maps {lhs_id:?} -> {reverse:?}", + lhs_snapshot + .path_for_buffer(lhs_excerpt.context.start.buffer_id) + .unwrap(), + rhs_snapshot + .path_for_buffer(rhs_excerpt.context.start.buffer_id) + .unwrap(), + "corresponding excerpts should have the same path" ); 
- } - - assert_eq!( - lhs_to_rhs.len(), - rhs_excerpt_ids.len(), - "mapping covers {} excerpts but rhs has {}", - lhs_to_rhs.len(), - rhs_excerpt_ids.len(), - ); - - let rhs_mapped_order: Vec = rhs_excerpt_ids - .iter() - .map(|rhs_id| { - *rhs_to_lhs.get(rhs_id).unwrap_or_else(|| { - panic!("rhs excerpt {rhs_id:?} has no mapping in rhs_to_lhs") - }) - }) - .collect(); - assert_eq!( - rhs_mapped_order, lhs_excerpt_ids, - "excerpt ordering mismatch: mapping rhs order through rhs_to_lhs doesn't match lhs order", - ); - - let rhs_paths: Vec = self.rhs_multibuffer.read(cx).paths().cloned().collect(); - let lhs_paths: Vec = lhs.multibuffer.read(cx).paths().cloned().collect(); - assert_eq!( - rhs_paths, lhs_paths, - "path set mismatch between rhs and lhs" - ); - - for path in &rhs_paths { - let rhs_path_excerpts: Vec = self + let diff = self .rhs_multibuffer .read(cx) - .excerpts_for_path(path) - .collect(); - let lhs_path_excerpts: Vec = - lhs.multibuffer.read(cx).excerpts_for_path(path).collect(); + .diff_for(rhs_excerpt.context.start.buffer_id) + .expect("missing diff"); assert_eq!( - rhs_path_excerpts.len(), - lhs_path_excerpts.len(), - "excerpt count mismatch for path {path:?}: rhs has {}, lhs has {}", - rhs_path_excerpts.len(), - lhs_path_excerpts.len(), - ); - let rhs_path_mapped: Vec = rhs_path_excerpts - .iter() - .map(|rhs_id| *rhs_to_lhs.get(rhs_id).unwrap()) - .collect(); - assert_eq!( - rhs_path_mapped, lhs_path_excerpts, - "per-path excerpt ordering mismatch for {path:?}", + lhs_excerpt.context.start.buffer_id, + diff.read(cx).base_text(cx).remote_id(), + "corresponding lhs excerpt should show diff base text" ); + + if quiesced { + let diff_snapshot = diff.read(cx).snapshot(cx); + let lhs_buffer_snapshot = lhs_snapshot + .buffer_for_id(lhs_excerpt.context.start.buffer_id) + .unwrap(); + let rhs_buffer_snapshot = rhs_snapshot + .buffer_for_id(rhs_excerpt.context.start.buffer_id) + .unwrap(); + let lhs_range = 
lhs_excerpt.context.to_point(&lhs_buffer_snapshot); + let rhs_range = rhs_excerpt.context.to_point(&rhs_buffer_snapshot); + let expected_lhs_range = + rhs_range_to_base_text_range(&rhs_range, &diff_snapshot, &rhs_buffer_snapshot); + assert_eq!( + lhs_range, expected_lhs_range, + "corresponding lhs excerpt should have a matching range" + ) + } } } } @@ -2316,7 +2245,7 @@ mod tests { let context_lines = rng.random_range(0..2); editor.update(cx, |editor, cx| { let path = PathKey::for_buffer(&buffer, cx); - editor.set_excerpts_for_path(path, buffer, ranges, context_lines, diff, cx); + editor.update_excerpts_for_path(path, buffer, ranges, context_lines, diff, cx); }); editor.update(cx, |editor, cx| { editor.check_invariants(true, cx); @@ -2351,7 +2280,14 @@ mod tests { let context_lines = rng.random_range(0..2); editor.update(cx, |editor, cx| { let path = PathKey::for_buffer(&buffer, cx); - editor.set_excerpts_for_path(path, buffer, ranges, context_lines, diff, cx); + editor.update_excerpts_for_path( + path, + buffer, + ranges, + context_lines, + diff, + cx, + ); }); } 15..=29 => { @@ -2395,7 +2331,14 @@ mod tests { let buffer = buffer.clone(); editor.update(cx, |editor, cx| { let path = PathKey::for_buffer(&buffer, cx); - editor.set_excerpts_for_path(path, buffer, ranges, context_lines, diff, cx); + editor.update_excerpts_for_path( + path, + buffer, + ranges, + context_lines, + diff, + cx, + ); }); } 55..=64 => { @@ -2407,16 +2350,14 @@ mod tests { } 65..=74 => { log::info!("removing excerpts for a random path"); - let paths = editor.update(cx, |editor, cx| { - editor - .rhs_multibuffer - .read(cx) - .paths() - .cloned() - .collect::>() + let ids = editor.update(cx, |editor, cx| { + let snapshot = editor.rhs_multibuffer.read(cx).snapshot(cx); + snapshot.all_buffer_ids().collect::>() }); - if let Some(path) = paths.choose(rng) { + if let Some(id) = ids.choose(rng) { editor.update(cx, |editor, cx| { + let snapshot = editor.rhs_multibuffer.read(cx).snapshot(cx); + let 
path = snapshot.path_for_buffer(*id).unwrap(); editor.remove_excerpts_for_path(path.clone(), cx); }); } @@ -2432,18 +2373,21 @@ mod tests { }); } 80..=89 => { - let excerpt_ids = editor.update(cx, |editor, cx| { - editor.rhs_multibuffer.read(cx).excerpt_ids() + let snapshot = editor.update(cx, |editor, cx| { + editor.rhs_multibuffer.read(cx).snapshot(cx) }); - if !excerpt_ids.is_empty() { - let count = rng.random_range(1..=excerpt_ids.len().min(3)); + let excerpts = snapshot.excerpts().collect::>(); + if !excerpts.is_empty() { + let count = rng.random_range(1..=excerpts.len().min(3)); let chosen: Vec<_> = - excerpt_ids.choose_multiple(rng, count).copied().collect(); + excerpts.choose_multiple(rng, count).cloned().collect(); let line_count = rng.random_range(1..5); log::info!("expanding {count} excerpts by {line_count} lines"); editor.update(cx, |editor, cx| { editor.expand_excerpts( - chosen.into_iter(), + chosen.into_iter().map(|excerpt| { + snapshot.anchor_in_excerpt(excerpt.context.start).unwrap() + }), line_count, ExpandExcerptDirection::UpAndDown, cx, @@ -2474,7 +2418,7 @@ mod tests { .collect::>(); editor.update(cx, |editor, cx| { let path = PathKey::for_buffer(&buffer, cx); - editor.set_excerpts_for_path(path, buffer, ranges, 2, diff, cx); + editor.update_excerpts_for_path(path, buffer, ranges, 2, diff, cx); }); } quiesced = true; @@ -2511,7 +2455,7 @@ mod tests { editor.update(cx, |editor, cx| { let path = PathKey::for_buffer(&buffer, cx); - editor.set_excerpts_for_path(path, buffer.clone(), ranges, 0, diff.clone(), cx); + editor.update_excerpts_for_path(path, buffer.clone(), ranges, 0, diff.clone(), cx); }); cx.run_until_parked(); @@ -2523,12 +2467,16 @@ mod tests { ); }); - let excerpt_ids = editor.update(cx, |editor, cx| { - editor.rhs_multibuffer.read(cx).excerpt_ids() + let excerpts = editor.update(cx, |editor, cx| { + let snapshot = editor.rhs_multibuffer.read(cx).snapshot(cx); + snapshot + .excerpts() + .map(|excerpt| 
snapshot.anchor_in_excerpt(excerpt.context.start).unwrap()) + .collect::>() }); editor.update(cx, |editor, cx| { editor.expand_excerpts( - excerpt_ids.iter().copied(), + excerpts.into_iter(), 2, multi_buffer::ExpandExcerptDirection::UpAndDown, cx, @@ -2564,7 +2512,7 @@ mod tests { editor.update(cx, |editor, cx| { let path = PathKey::for_buffer(&buffer, cx); - editor.set_excerpts_for_path( + editor.update_excerpts_for_path( path, buffer.clone(), vec![Point::new(0, 0)..buffer.read(cx).max_point()], @@ -2693,7 +2641,7 @@ mod tests { editor.update(cx, |editor, cx| { let path1 = PathKey::for_buffer(&buffer1, cx); - editor.set_excerpts_for_path( + editor.update_excerpts_for_path( path1, buffer1.clone(), vec![Point::new(0, 0)..buffer1.read(cx).max_point()], @@ -2702,7 +2650,7 @@ mod tests { cx, ); let path2 = PathKey::for_buffer(&buffer2, cx); - editor.set_excerpts_for_path( + editor.update_excerpts_for_path( path2, buffer2.clone(), vec![Point::new(0, 0)..buffer2.read(cx).max_point()], @@ -2851,7 +2799,7 @@ mod tests { editor.update(cx, |editor, cx| { let path = PathKey::for_buffer(&buffer, cx); - editor.set_excerpts_for_path( + editor.update_excerpts_for_path( path, buffer.clone(), vec![Point::new(0, 0)..buffer.read(cx).max_point()], @@ -2978,7 +2926,7 @@ mod tests { editor.update(cx, |editor, cx| { let path = PathKey::for_buffer(&buffer, cx); - editor.set_excerpts_for_path( + editor.update_excerpts_for_path( path, buffer.clone(), vec![Point::new(0, 0)..buffer.read(cx).max_point()], @@ -3097,7 +3045,7 @@ mod tests { editor.update(cx, |editor, cx| { let path = PathKey::for_buffer(&buffer, cx); - editor.set_excerpts_for_path( + editor.update_excerpts_for_path( path, buffer.clone(), vec![Point::new(0, 0)..buffer.read(cx).max_point()], @@ -3227,7 +3175,7 @@ mod tests { editor.update(cx, |editor, cx| { let path = PathKey::for_buffer(&buffer, cx); - editor.set_excerpts_for_path( + editor.update_excerpts_for_path( path, buffer.clone(), vec![Point::new(0, 
0)..buffer.read(cx).max_point()], @@ -3324,7 +3272,7 @@ mod tests { editor.update(cx, |editor, cx| { let end = Point::new(0, text.len() as u32); let path1 = PathKey::for_buffer(&buffer1, cx); - editor.set_excerpts_for_path( + editor.update_excerpts_for_path( path1, buffer1.clone(), vec![Point::new(0, 0)..end], @@ -3333,7 +3281,7 @@ mod tests { cx, ); let path2 = PathKey::for_buffer(&buffer2, cx); - editor.set_excerpts_for_path( + editor.update_excerpts_for_path( path2, buffer2.clone(), vec![Point::new(0, 0)..end], @@ -3401,7 +3349,7 @@ mod tests { editor.update(cx, |editor, cx| { let path = PathKey::for_buffer(&buffer, cx); - editor.set_excerpts_for_path( + editor.update_excerpts_for_path( path, buffer.clone(), vec![Point::new(0, 0)..buffer.read(cx).max_point()], @@ -3464,7 +3412,7 @@ mod tests { editor.update(cx, |editor, cx| { let path = PathKey::for_buffer(&buffer, cx); - editor.set_excerpts_for_path( + editor.update_excerpts_for_path( path, buffer.clone(), vec![Point::new(0, 0)..buffer.read(cx).max_point()], @@ -3525,7 +3473,7 @@ mod tests { editor.update(cx, |editor, cx| { let path = PathKey::for_buffer(&buffer, cx); - editor.set_excerpts_for_path( + editor.update_excerpts_for_path( path, buffer.clone(), vec![Point::new(0, 0)..buffer.read(cx).max_point()], @@ -3641,7 +3589,7 @@ mod tests { editor.update(cx, |editor, cx| { let path1 = PathKey::for_buffer(&buffer1, cx); - editor.set_excerpts_for_path( + editor.update_excerpts_for_path( path1, buffer1.clone(), vec![Point::new(0, 0)..buffer1.read(cx).max_point()], @@ -3651,7 +3599,7 @@ mod tests { ); let path2 = PathKey::for_buffer(&buffer2, cx); - editor.set_excerpts_for_path( + editor.update_excerpts_for_path( path2, buffer2.clone(), vec![Point::new(0, 0)..buffer2.read(cx).max_point()], @@ -3749,7 +3697,7 @@ mod tests { editor.update(cx, |editor, cx| { let path = PathKey::for_buffer(&buffer, cx); - editor.set_excerpts_for_path( + editor.update_excerpts_for_path( path, buffer.clone(), vec![Point::new(0, 
0)..buffer.read(cx).max_point()], @@ -3825,7 +3773,7 @@ mod tests { editor.update(cx, |editor, cx| { let path = PathKey::for_buffer(&buffer, cx); - editor.set_excerpts_for_path( + editor.update_excerpts_for_path( path, buffer.clone(), vec![Point::new(0, 0)..buffer.read(cx).max_point()], @@ -3912,7 +3860,7 @@ mod tests { editor.update(cx, |editor, cx| { let path = PathKey::for_buffer(&buffer, cx); - editor.set_excerpts_for_path( + editor.update_excerpts_for_path( path, buffer.clone(), vec![Point::new(0, 0)..buffer.read(cx).max_point()], @@ -4026,7 +3974,7 @@ mod tests { editor.update(cx, |editor, cx| { let path = PathKey::for_buffer(&buffer, cx); - editor.set_excerpts_for_path( + editor.update_excerpts_for_path( path, buffer.clone(), vec![Point::new(0, 0)..buffer.read(cx).max_point()], @@ -4110,7 +4058,7 @@ mod tests { editor.update(cx, |editor, cx| { let path = PathKey::for_buffer(&buffer, cx); - editor.set_excerpts_for_path( + editor.update_excerpts_for_path( path, buffer.clone(), vec![Point::new(0, 0)..buffer.read(cx).max_point()], @@ -4194,7 +4142,7 @@ mod tests { editor.update(cx, |editor, cx| { let path = PathKey::for_buffer(&buffer, cx); - editor.set_excerpts_for_path( + editor.update_excerpts_for_path( path, buffer.clone(), vec![Point::new(0, 0)..buffer.read(cx).max_point()], @@ -4286,7 +4234,7 @@ mod tests { editor.update(cx, |editor, cx| { let path = PathKey::for_buffer(&buffer, cx); - editor.set_excerpts_for_path( + editor.update_excerpts_for_path( path, buffer.clone(), vec![Point::new(0, 0)..buffer.read(cx).max_point()], @@ -4414,7 +4362,7 @@ mod tests { editor.update(cx, |editor, cx| { let path = PathKey::for_buffer(&buffer, cx); - editor.set_excerpts_for_path( + editor.update_excerpts_for_path( path, buffer.clone(), vec![Point::new(0, 0)..buffer.read(cx).max_point()], @@ -4561,7 +4509,7 @@ mod tests { editor.update(cx, |editor, cx| { let path = PathKey::for_buffer(&buffer, cx); - editor.set_excerpts_for_path( + editor.update_excerpts_for_path( path, 
buffer.clone(), vec![Point::new(0, 0)..buffer.read(cx).max_point()], @@ -4783,7 +4731,7 @@ mod tests { editor.update(cx, |editor, cx| { let path = PathKey::for_buffer(&buffer, cx); - editor.set_excerpts_for_path( + editor.update_excerpts_for_path( path, buffer.clone(), vec![Point::new(0, 0)..buffer.read(cx).max_point()], @@ -5122,7 +5070,7 @@ mod tests { editor.update(cx, |editor, cx| { let path1 = PathKey::for_buffer(&buffer1, cx); - editor.set_excerpts_for_path( + editor.update_excerpts_for_path( path1, buffer1.clone(), vec![Point::new(0, 0)..buffer1.read(cx).max_point()], @@ -5131,7 +5079,7 @@ mod tests { cx, ); let path2 = PathKey::for_buffer(&buffer2, cx); - editor.set_excerpts_for_path( + editor.update_excerpts_for_path( path2, buffer2.clone(), vec![Point::new(0, 0)..buffer2.read(cx).max_point()], @@ -5287,7 +5235,7 @@ mod tests { editor.update(cx, |editor, cx| { let path = PathKey::for_buffer(&buffer, cx); - editor.set_excerpts_for_path( + editor.update_excerpts_for_path( path, buffer.clone(), vec![Point::new(0, 0)..buffer.read(cx).max_point()], @@ -5448,7 +5396,7 @@ mod tests { editor.update(cx, |editor, cx| { let path = PathKey::for_buffer(&buffer, cx); - editor.set_excerpts_for_path( + editor.update_excerpts_for_path( path, buffer.clone(), vec![Point::new(0, 0)..buffer.read(cx).max_point()], @@ -5607,7 +5555,7 @@ mod tests { editor.update(cx, |editor, cx| { let path = PathKey::for_buffer(&buffer, cx); - editor.set_excerpts_for_path( + editor.update_excerpts_for_path( path, buffer.clone(), vec![Point::new(0, 0)..buffer.read(cx).max_point()], @@ -5738,7 +5686,7 @@ mod tests { editor.update(cx, |editor, cx| { let path = PathKey::for_buffer(&buffer, cx); - editor.set_excerpts_for_path( + editor.update_excerpts_for_path( path, buffer.clone(), vec![ @@ -5799,7 +5747,7 @@ mod tests { editor.update(cx, |editor, cx| { let path = PathKey::for_buffer(&buffer, cx); - editor.set_excerpts_for_path( + editor.update_excerpts_for_path( path, buffer.clone(), 
vec![Point::new(0, 0)..buffer.read(cx).max_point()], @@ -5882,7 +5830,7 @@ mod tests { editor.update(cx, |editor, cx| { let path = PathKey::for_buffer(&buffer, cx); - editor.set_excerpts_for_path( + editor.update_excerpts_for_path( path, buffer.clone(), vec![Point::new(0, 0)..Point::new(3, 3)], @@ -5994,7 +5942,7 @@ mod tests { let path_b = cx.read(|cx| PathKey::for_buffer(&buffer_b, cx)); editor.update(cx, |editor, cx| { - editor.set_excerpts_for_path( + editor.update_excerpts_for_path( path_a.clone(), buffer_a.clone(), vec![Point::new(0, 0)..buffer_a.read(cx).max_point()], @@ -6002,7 +5950,7 @@ mod tests { diff_a.clone(), cx, ); - editor.set_excerpts_for_path( + editor.update_excerpts_for_path( path_b.clone(), buffer_b.clone(), vec![Point::new(0, 0)..buffer_b.read(cx).max_point()], @@ -6032,7 +5980,7 @@ mod tests { cx.run_until_parked(); editor.update(cx, |editor, cx| { - editor.set_excerpts_for_path( + editor.update_excerpts_for_path( path_a.clone(), buffer_a.clone(), vec![Point::new(0, 0)..buffer_a.read(cx).max_point()], @@ -6089,7 +6037,7 @@ mod tests { }; editor.update(cx, |editor, cx| { - editor.set_excerpts_for_path( + editor.update_excerpts_for_path( path_key_1.clone(), buffer.clone(), vec![Point::new(0, 0)..Point::new(1, 0)], @@ -6097,7 +6045,7 @@ mod tests { diff.clone(), cx, ); - editor.set_excerpts_for_path( + editor.update_excerpts_for_path( path_key_2.clone(), buffer.clone(), vec![Point::new(1, 0)..buffer.read(cx).max_point()], diff --git a/crates/editor/src/split_editor_view.rs b/crates/editor/src/split_editor_view.rs index 454013c530ab8389314892011e5eb115ee6e0957..02388df9a7516e72810b91d65292795e6375470e 100644 --- a/crates/editor/src/split_editor_view.rs +++ b/crates/editor/src/split_editor_view.rs @@ -7,7 +7,7 @@ use gpui::{ ParentElement, Pixels, StatefulInteractiveElement, Styled, TextStyleRefinement, Window, div, linear_color_stop, linear_gradient, point, px, size, }; -use multi_buffer::{Anchor, ExcerptId}; +use multi_buffer::{Anchor, 
ExcerptBoundaryInfo}; use settings::Settings; use smallvec::smallvec; use text::BufferId; @@ -429,7 +429,7 @@ impl SplitBufferHeadersElement { let sticky_header_excerpt_id = snapshot .sticky_header_excerpt(scroll_position.y) - .map(|e| e.excerpt.id); + .map(|e| e.excerpt); let non_sticky_headers = self.build_non_sticky_headers( &snapshot, @@ -476,9 +476,10 @@ impl SplitBufferHeadersElement { let mut anchors_by_buffer: HashMap = HashMap::default(); for selection in all_anchor_selections.iter() { let head = selection.head(); - if let Some(buffer_id) = head.text_anchor.buffer_id { + if let Some((text_anchor, _)) = snapshot.buffer_snapshot().anchor_to_buffer_anchor(head) + { anchors_by_buffer - .entry(buffer_id) + .entry(text_anchor.buffer_id) .and_modify(|(latest_id, latest_anchor)| { if selection.id > *latest_id { *latest_id = selection.id; @@ -520,7 +521,7 @@ impl SplitBufferHeadersElement { ); let editor_bg_color = cx.theme().colors().editor_background; - let selected = selected_buffer_ids.contains(&excerpt.buffer_id); + let selected = selected_buffer_ids.contains(&excerpt.buffer_id()); let mut header = v_flex() .id("sticky-buffer-header") @@ -594,7 +595,7 @@ impl SplitBufferHeadersElement { end_row: DisplayRow, selected_buffer_ids: &HashSet, latest_selection_anchors: &HashMap, - sticky_header_excerpt_id: Option, + sticky_header: Option<&ExcerptBoundaryInfo>, window: &mut Window, cx: &mut App, ) -> Vec { @@ -603,7 +604,7 @@ impl SplitBufferHeadersElement { for (block_row, block) in snapshot.blocks_in_range(start_row..end_row) { let (excerpt, is_folded) = match block { Block::BufferHeader { excerpt, .. } => { - if sticky_header_excerpt_id == Some(excerpt.id) { + if sticky_header == Some(excerpt) { continue; } (excerpt, false) @@ -613,7 +614,7 @@ impl SplitBufferHeadersElement { Block::ExcerptBoundary { .. } | Block::Custom(_) | Block::Spacer { .. 
} => continue, }; - let selected = selected_buffer_ids.contains(&excerpt.buffer_id); + let selected = selected_buffer_ids.contains(&excerpt.buffer_id()); let jump_data = header_jump_data( snapshot, block_row, diff --git a/crates/editor/src/tasks.rs b/crates/editor/src/tasks.rs new file mode 100644 index 0000000000000000000000000000000000000000..7323d4159cec58a5a7db7daa42ca201125200fae --- /dev/null +++ b/crates/editor/src/tasks.rs @@ -0,0 +1,101 @@ +use crate::Editor; + +use collections::HashMap; +use gpui::{App, Task, Window}; +use lsp::LanguageServerName; +use project::{Location, project_settings::ProjectSettings}; +use settings::Settings as _; +use task::{TaskContext, TaskVariables, VariableName}; +use text::{BufferId, ToOffset, ToPoint}; + +impl Editor { + pub fn task_context(&self, window: &mut Window, cx: &mut App) -> Task> { + let Some(project) = self.project.clone() else { + return Task::ready(None); + }; + let display_snapshot = self.display_snapshot(cx); + let selection = self.selections.newest_adjusted(&display_snapshot); + let start = display_snapshot + .buffer_snapshot() + .anchor_after(selection.start); + let end = display_snapshot + .buffer_snapshot() + .anchor_after(selection.end); + let Some((buffer_snapshot, range)) = display_snapshot + .buffer_snapshot() + .anchor_range_to_buffer_anchor_range(start..end) + else { + return Task::ready(None); + }; + let Some(buffer) = self.buffer.read(cx).buffer(buffer_snapshot.remote_id()) else { + return Task::ready(None); + }; + let location = Location { buffer, range }; + let captured_variables = { + let mut variables = TaskVariables::default(); + let buffer = location.buffer.read(cx); + let buffer_id = buffer.remote_id(); + let snapshot = buffer.snapshot(); + let starting_point = location.range.start.to_point(&snapshot); + let starting_offset = starting_point.to_offset(&snapshot); + for (_, tasks) in self + .tasks + .range((buffer_id, 0)..(buffer_id, starting_point.row + 1)) + { + if !tasks + .context_range + 
.contains(&crate::BufferOffset(starting_offset)) + { + continue; + } + for (capture_name, value) in tasks.extra_variables.iter() { + variables.insert( + VariableName::Custom(capture_name.to_owned().into()), + value.clone(), + ); + } + } + variables + }; + + project.update(cx, |project, cx| { + project.task_store().update(cx, |task_store, cx| { + task_store.task_context_for_location(captured_variables, location, cx) + }) + }) + } + + pub fn lsp_task_sources(&self, cx: &App) -> HashMap> { + let lsp_settings = &ProjectSettings::get_global(cx).lsp; + + self.buffer() + .read(cx) + .all_buffers() + .into_iter() + .filter_map(|buffer| { + let lsp_tasks_source = buffer + .read(cx) + .language()? + .context_provider()? + .lsp_task_source()?; + if lsp_settings + .get(&lsp_tasks_source) + .is_none_or(|s| s.enable_lsp_tasks) + { + let buffer_id = buffer.read(cx).remote_id(); + Some((lsp_tasks_source, buffer_id)) + } else { + None + } + }) + .fold( + HashMap::default(), + |mut acc, (lsp_task_source, buffer_id)| { + acc.entry(lsp_task_source) + .or_insert_with(Vec::new) + .push(buffer_id); + acc + }, + ) + } +} diff --git a/crates/editor/src/test.rs b/crates/editor/src/test.rs index bef2b3fc3ec2b949ffb8288d59b1201f6f3dde90..22f686668bd98b4c5b5235e34c0881d6583ed3bc 100644 --- a/crates/editor/src/test.rs +++ b/crates/editor/src/test.rs @@ -245,7 +245,7 @@ pub fn editor_content_with_blocks_and_size( format!( "§ {}", first_excerpt - .buffer + .buffer(snapshot.buffer_snapshot()) .file() .map(|file| file.file_name(cx)) .unwrap_or("") @@ -274,7 +274,7 @@ pub fn editor_content_with_blocks_and_size( format!( "§ {}", excerpt - .buffer + .buffer(snapshot.buffer_snapshot()) .file() .map(|file| file.file_name(cx)) .unwrap_or("") diff --git a/crates/editor/src/test/editor_test_context.rs b/crates/editor/src/test/editor_test_context.rs index 101c1559a7a0fb6e5d0d5bba7281a0cb78ab4b65..84b03d91ca1cf2e0ba858398bcf8134ce16edb41 100644 --- a/crates/editor/src/test/editor_test_context.rs +++ 
b/crates/editor/src/test/editor_test_context.rs @@ -1,5 +1,5 @@ use crate::{ - AnchorRangeExt, DisplayPoint, Editor, ExcerptId, MultiBuffer, MultiBufferSnapshot, RowExt, + DisplayPoint, Editor, MultiBuffer, MultiBufferSnapshot, RowExt, display_map::{HighlightKey, ToDisplayPoint}, }; use buffer_diff::DiffHunkStatusKind; @@ -13,7 +13,9 @@ use gpui::{ }; use itertools::Itertools; use language::{Buffer, BufferSnapshot, LanguageRegistry}; -use multi_buffer::{Anchor, ExcerptRange, MultiBufferOffset, MultiBufferRow, PathKey}; +use multi_buffer::{ + Anchor, AnchorRangeExt, ExcerptRange, MultiBufferOffset, MultiBufferRow, PathKey, +}; use parking_lot::RwLock; use project::{FakeFs, Project}; use std::{ @@ -464,7 +466,21 @@ impl EditorTestContext { let selections = editor.selections.disjoint_anchors_arc(); let excerpts = multibuffer_snapshot .excerpts() - .map(|(e_id, snapshot, range)| (e_id, snapshot.clone(), range)) + .map(|info| { + ( + multibuffer_snapshot + .buffer_for_id(info.context.start.buffer_id) + .cloned() + .unwrap(), + multibuffer_snapshot + .anchor_in_excerpt(info.context.start) + .unwrap() + ..multibuffer_snapshot + .anchor_in_excerpt(info.context.end) + .unwrap(), + info, + ) + }) .collect::>(); (multibuffer_snapshot, selections, excerpts) @@ -478,14 +494,23 @@ impl EditorTestContext { fmt_additional_notes(), ); - for (ix, (excerpt_id, snapshot, range)) in excerpts.into_iter().enumerate() { + for (ix, (snapshot, multibuffer_range, excerpt_range)) in excerpts.into_iter().enumerate() { let is_folded = self .update_editor(|editor, _, cx| editor.is_buffer_folded(snapshot.remote_id(), cx)); let (expected_text, expected_selections) = marked_text_ranges(expected_excerpts[ix], true); if expected_text == "[FOLDED]\n" { assert!(is_folded, "excerpt {} should be folded", ix); - let is_selected = selections.iter().any(|s| s.head().excerpt_id == excerpt_id); + let is_selected = selections.iter().any(|s| { + multibuffer_range + .start + .cmp(&s.head(), 
&multibuffer_snapshot) + .is_le() + && multibuffer_range + .end + .cmp(&s.head(), &multibuffer_snapshot) + .is_ge() + }); if !expected_selections.is_empty() { assert!( is_selected, @@ -510,7 +535,7 @@ impl EditorTestContext { ); assert_eq!( multibuffer_snapshot - .text_for_range(Anchor::range_in_buffer(excerpt_id, range.context.clone())) + .text_for_range(multibuffer_range.clone()) .collect::(), expected_text, "{}", @@ -519,13 +544,24 @@ impl EditorTestContext { let selections = selections .iter() - .filter(|s| s.head().excerpt_id == excerpt_id) - .map(|s| { - let head = text::ToOffset::to_offset(&s.head().text_anchor, &snapshot) - - text::ToOffset::to_offset(&range.context.start, &snapshot); - let tail = text::ToOffset::to_offset(&s.head().text_anchor, &snapshot) - - text::ToOffset::to_offset(&range.context.start, &snapshot); - tail..head + .filter(|s| { + multibuffer_range + .start + .cmp(&s.head(), &multibuffer_snapshot) + .is_le() + && multibuffer_range + .end + .cmp(&s.head(), &multibuffer_snapshot) + .is_ge() + }) + .filter_map(|s| { + let (head_anchor, buffer_snapshot) = + multibuffer_snapshot.anchor_to_buffer_anchor(s.head())?; + let head = text::ToOffset::to_offset(&head_anchor, buffer_snapshot) + - text::ToOffset::to_offset(&excerpt_range.context.start, buffer_snapshot); + let tail = text::ToOffset::to_offset(&head_anchor, buffer_snapshot) + - text::ToOffset::to_offset(&excerpt_range.context.start, buffer_snapshot); + Some(tail..head) }) .collect::>(); // todo: selections that cross excerpt boundaries.. 
@@ -546,9 +582,12 @@ impl EditorTestContext { let selections = editor.selections.disjoint_anchors_arc().to_vec(); let excerpts = multibuffer_snapshot .excerpts() - .map(|(e_id, snapshot, range)| { - let is_folded = editor.is_buffer_folded(snapshot.remote_id(), cx); - (e_id, snapshot.clone(), range, is_folded) + .map(|info| { + let buffer_snapshot = multibuffer_snapshot + .buffer_for_id(info.context.start.buffer_id) + .unwrap(); + let is_folded = editor.is_buffer_folded(buffer_snapshot.remote_id(), cx); + (buffer_snapshot.clone(), info, is_folded) }) .collect::>(); @@ -673,7 +712,7 @@ impl EditorTestContext { struct FormatMultiBufferAsMarkedText { multibuffer_snapshot: MultiBufferSnapshot, selections: Vec>, - excerpts: Vec<(ExcerptId, BufferSnapshot, ExcerptRange, bool)>, + excerpts: Vec<(BufferSnapshot, ExcerptRange, bool)>, } impl std::fmt::Display for FormatMultiBufferAsMarkedText { @@ -684,25 +723,40 @@ impl std::fmt::Display for FormatMultiBufferAsMarkedText { excerpts, } = self; - for (excerpt_id, snapshot, range, is_folded) in excerpts.into_iter() { + for (_snapshot, range, is_folded) in excerpts.into_iter() { write!(f, "[EXCERPT]\n")?; if *is_folded { write!(f, "[FOLDED]\n")?; } + let multibuffer_range = multibuffer_snapshot + .buffer_anchor_range_to_anchor_range(range.context.clone()) + .unwrap(); + let mut text = multibuffer_snapshot - .text_for_range(Anchor::range_in_buffer(*excerpt_id, range.context.clone())) + .text_for_range(multibuffer_range.clone()) .collect::(); let selections = selections .iter() - .filter(|&s| s.head().excerpt_id == *excerpt_id) - .map(|s| { - let head = text::ToOffset::to_offset(&s.head().text_anchor, &snapshot) - - text::ToOffset::to_offset(&range.context.start, &snapshot); - let tail = text::ToOffset::to_offset(&s.head().text_anchor, &snapshot) - - text::ToOffset::to_offset(&range.context.start, &snapshot); - tail..head + .filter(|&s| { + multibuffer_range + .start + .cmp(&s.head(), multibuffer_snapshot) + .is_le() + && 
multibuffer_range + .end + .cmp(&s.head(), multibuffer_snapshot) + .is_ge() + }) + .filter_map(|s| { + let (head_anchor, buffer_snapshot) = + multibuffer_snapshot.anchor_to_buffer_anchor(s.head())?; + let head = text::ToOffset::to_offset(&head_anchor, buffer_snapshot) + - text::ToOffset::to_offset(&range.context.start, buffer_snapshot); + let tail = text::ToOffset::to_offset(&head_anchor, buffer_snapshot) + - text::ToOffset::to_offset(&range.context.start, buffer_snapshot); + Some(tail..head) }) .rev() .collect::>(); diff --git a/crates/encoding_selector/src/active_buffer_encoding.rs b/crates/encoding_selector/src/active_buffer_encoding.rs index 417ff241b72300aa90496f896fcf6c3ed3a363c7..42fd5f662f66c8e9f1eaa18953c6765c51244e77 100644 --- a/crates/encoding_selector/src/active_buffer_encoding.rs +++ b/crates/encoding_selector/src/active_buffer_encoding.rs @@ -47,7 +47,7 @@ impl ActiveBufferEncoding { self.is_shared = project.is_shared(); self.is_via_remote_server = project.is_via_remote_server(); - if let Some((_, buffer, _)) = editor.read(cx).active_excerpt(cx) { + if let Some(buffer) = editor.read(cx).active_buffer(cx) { let buffer = buffer.read(cx); self.active_encoding = Some(buffer.encoding()); self.has_bom = buffer.has_bom(); diff --git a/crates/encoding_selector/src/encoding_selector.rs b/crates/encoding_selector/src/encoding_selector.rs index 3954bf29a30a0981c25bee3eb88829a7002881ad..e99b475de6773c647ef19195ef42052d37769346 100644 --- a/crates/encoding_selector/src/encoding_selector.rs +++ b/crates/encoding_selector/src/encoding_selector.rs @@ -47,11 +47,11 @@ impl EncodingSelector { window: &mut Window, cx: &mut Context, ) -> Option<()> { - let (_, buffer, _) = workspace + let buffer = workspace .active_item(cx)? .act_as::(cx)? 
.read(cx) - .active_excerpt(cx)?; + .active_buffer(cx)?; let buffer_handle = buffer.read(cx); let project = workspace.project().read(cx); diff --git a/crates/env_var/Cargo.toml b/crates/env_var/Cargo.toml new file mode 100644 index 0000000000000000000000000000000000000000..2cbbd08c7833d3e57a09766d42ffffe35c620a93 --- /dev/null +++ b/crates/env_var/Cargo.toml @@ -0,0 +1,15 @@ +[package] +name = "env_var" +version = "0.1.0" +edition.workspace = true +publish.workspace = true +license = "GPL-3.0-or-later" + +[lints] +workspace = true + +[lib] +path = "src/env_var.rs" + +[dependencies] +gpui.workspace = true diff --git a/crates/env_var/LICENSE-GPL b/crates/env_var/LICENSE-GPL new file mode 120000 index 0000000000000000000000000000000000000000..89e542f750cd3860a0598eff0dc34b56d7336dc4 --- /dev/null +++ b/crates/env_var/LICENSE-GPL @@ -0,0 +1 @@ +../../LICENSE-GPL \ No newline at end of file diff --git a/crates/env_var/src/env_var.rs b/crates/env_var/src/env_var.rs new file mode 100644 index 0000000000000000000000000000000000000000..79f671e0147ebfaad4ab76a123cc477dc7e55cb7 --- /dev/null +++ b/crates/env_var/src/env_var.rs @@ -0,0 +1,40 @@ +use gpui::SharedString; + +#[derive(Clone)] +pub struct EnvVar { + pub name: SharedString, + /// Value of the environment variable. Also `None` when set to an empty string. + pub value: Option, +} + +impl EnvVar { + pub fn new(name: SharedString) -> Self { + let value = std::env::var(name.as_str()).ok(); + if value.as_ref().is_some_and(|v| v.is_empty()) { + Self { name, value: None } + } else { + Self { name, value } + } + } + + pub fn or(self, other: EnvVar) -> EnvVar { + if self.value.is_some() { self } else { other } + } +} + +/// Creates a `LazyLock` expression for use in a `static` declaration. +#[macro_export] +macro_rules! env_var { + ($name:expr) => { + ::std::sync::LazyLock::new(|| $crate::EnvVar::new(($name).into())) + }; +} + +/// Generates a `LazyLock` expression for use in a `static` declaration. 
Checks if the +/// environment variable exists and is non-empty. +#[macro_export] +macro_rules! bool_env_var { + ($name:expr) => { + ::std::sync::LazyLock::new(|| $crate::EnvVar::new(($name).into()).value.is_some()) + }; +} diff --git a/crates/eval_cli/src/headless.rs b/crates/eval_cli/src/headless.rs index 72feaacbae270224240f1da9e6e6c1008ba97c84..0ddd99e8f8abd9dbd73e1d7461526f3e7cb24f11 100644 --- a/crates/eval_cli/src/headless.rs +++ b/crates/eval_cli/src/headless.rs @@ -1,7 +1,7 @@ use std::path::PathBuf; use std::sync::Arc; -use client::{Client, ProxySettings, UserStore}; +use client::{Client, ProxySettings, RefreshLlmTokenListener, UserStore}; use db::AppDatabase; use extension::ExtensionHostProxy; use fs::RealFs; @@ -108,7 +108,8 @@ pub fn init(cx: &mut App) -> Arc { let extension_host_proxy = ExtensionHostProxy::global(cx); debug_adapter_extension::init(extension_host_proxy.clone(), cx); language_extension::init(LspAccess::Noop, extension_host_proxy, languages.clone()); - language_model::init(user_store.clone(), client.clone(), cx); + language_model::init(cx); + RefreshLlmTokenListener::register(client.clone(), user_store.clone(), cx); language_models::init(user_store.clone(), client.clone(), cx); languages::init(languages.clone(), fs.clone(), node_runtime.clone(), cx); prompt_store::init(cx); diff --git a/crates/extension/src/extension_builder.rs b/crates/extension/src/extension_builder.rs index 1c204398c34728cab6b05687050243b4a988902c..f0e789994127c9347c8eb6b8d16417ba7eaaf831 100644 --- a/crates/extension/src/extension_builder.rs +++ b/crates/extension/src/extension_builder.rs @@ -296,16 +296,12 @@ impl ExtensionBuilder { let remotes_output = util::command::new_command("git") .arg("--git-dir") .arg(&git_dir) - .args(["remote", "-v"]) + .args(["remote", "get-url", "origin"]) + .env("GIT_CONFIG_GLOBAL", "/dev/null") .output() .await?; let has_remote = remotes_output.status.success() - && String::from_utf8_lossy(&remotes_output.stdout) - .lines() - 
.any(|line| { - let mut parts = line.split(|c: char| c.is_whitespace()); - parts.next() == Some("origin") && parts.any(|part| part == url) - }); + && String::from_utf8_lossy(&remotes_output.stdout).trim() == url; if !has_remote { bail!( "grammar directory '{}' already exists, but is not a git clone of '{}'", diff --git a/crates/feature_flags/src/flags.rs b/crates/feature_flags/src/flags.rs index 4d477aa4b393ee8b04829833324cd9092c2a04cd..54dc96ad37f8e51a1074a0a32976f8236cb1a0ed 100644 --- a/crates/feature_flags/src/flags.rs +++ b/crates/feature_flags/src/flags.rs @@ -47,12 +47,6 @@ impl FeatureFlag for DiffReviewFeatureFlag { } } -pub struct GitGraphFeatureFlag; - -impl FeatureFlag for GitGraphFeatureFlag { - const NAME: &'static str = "git-graph"; -} - pub struct StreamingEditFileToolFeatureFlag; impl FeatureFlag for StreamingEditFileToolFeatureFlag { diff --git a/crates/fs/src/fake_git_repo.rs b/crates/fs/src/fake_git_repo.rs index 12a095ffe27aa760623fa2b6ce674fdd9008eef1..751796fb83164b78dc5d6789f0ae7870eff16ce1 100644 --- a/crates/fs/src/fake_git_repo.rs +++ b/crates/fs/src/fake_git_repo.rs @@ -10,6 +10,7 @@ use git::{ GRAPH_CHUNK_SIZE, GitRepository, GitRepositoryCheckpoint, InitialGraphCommitData, LogOrder, LogSource, PushOptions, Remote, RepoPath, ResetMode, SearchCommitArgs, Worktree, }, + stash::GitStash, status::{ DiffTreeType, FileStatus, GitStatus, StatusCode, TrackedStatus, TreeDiff, TreeDiffStatus, UnmergedStatus, @@ -35,8 +36,16 @@ pub struct FakeGitRepository { pub(crate) is_trusted: Arc, } +#[derive(Debug, Clone)] +pub struct FakeCommitSnapshot { + pub head_contents: HashMap, + pub index_contents: HashMap, + pub sha: String, +} + #[derive(Debug, Clone)] pub struct FakeGitRepositoryState { + pub commit_history: Vec, pub event_emitter: smol::channel::Sender, pub unmerged_paths: HashMap, pub head_contents: HashMap, @@ -53,7 +62,7 @@ pub struct FakeGitRepositoryState { pub simulated_create_worktree_error: Option, pub refs: HashMap, pub graph_commits: 
Vec>, - pub worktrees: Vec, + pub stash_entries: GitStash, } impl FakeGitRepositoryState { @@ -73,7 +82,8 @@ impl FakeGitRepositoryState { oids: Default::default(), remotes: HashMap::default(), graph_commits: Vec::new(), - worktrees: Vec::new(), + commit_history: Vec::new(), + stash_entries: Default::default(), } } } @@ -216,11 +226,52 @@ impl GitRepository for FakeGitRepository { fn reset( &self, - _commit: String, - _mode: ResetMode, + commit: String, + mode: ResetMode, _env: Arc>, ) -> BoxFuture<'_, Result<()>> { - unimplemented!() + self.with_state_async(true, move |state| { + let pop_count = if commit == "HEAD~" || commit == "HEAD^" { + 1 + } else if let Some(suffix) = commit.strip_prefix("HEAD~") { + suffix + .parse::() + .with_context(|| format!("Invalid HEAD~ offset: {commit}"))? + } else { + match state + .commit_history + .iter() + .rposition(|entry| entry.sha == commit) + { + Some(index) => state.commit_history.len() - index, + None => anyhow::bail!("Unknown commit ref: {commit}"), + } + }; + + if pop_count == 0 || pop_count > state.commit_history.len() { + anyhow::bail!( + "Cannot reset {pop_count} commit(s): only {} in history", + state.commit_history.len() + ); + } + + let target_index = state.commit_history.len() - pop_count; + let snapshot = state.commit_history[target_index].clone(); + state.commit_history.truncate(target_index); + + match mode { + ResetMode::Soft => { + state.head_contents = snapshot.head_contents; + } + ResetMode::Mixed => { + state.head_contents = snapshot.head_contents; + state.index_contents = state.head_contents.clone(); + } + } + + state.refs.insert("HEAD".into(), snapshot.sha); + Ok(()) + }) } fn checkout_files( @@ -380,13 +431,13 @@ impl GitRepository for FakeGitRepository { } fn stash_entries(&self) -> BoxFuture<'_, Result> { - async { Ok(git::stash::GitStash::default()) }.boxed() + self.with_state_async(false, |state| Ok(state.stash_entries.clone())) } fn branches(&self) -> BoxFuture<'_, Result>> { 
self.with_state_async(false, move |state| { let current_branch = &state.current_branch_name; - Ok(state + let mut branches = state .branches .iter() .map(|branch_name| { @@ -404,78 +455,163 @@ impl GitRepository for FakeGitRepository { upstream: None, } }) - .collect()) + .collect::>(); + // compute snapshot expects these to be sorted by ref_name + // because that's what git itself does + branches.sort_by(|a, b| a.ref_name.cmp(&b.ref_name)); + Ok(branches) }) } fn worktrees(&self) -> BoxFuture<'_, Result>> { - let dot_git_path = self.dot_git_path.clone(); - self.with_state_async(false, move |state| { - let work_dir = dot_git_path - .parent() - .map(PathBuf::from) - .unwrap_or(dot_git_path); - let head_sha = state - .refs - .get("HEAD") - .cloned() - .unwrap_or_else(|| "0000000".to_string()); - let branch_ref = state - .current_branch_name - .as_ref() - .map(|name| format!("refs/heads/{name}")) - .unwrap_or_else(|| "refs/heads/main".to_string()); - let main_worktree = Worktree { - path: work_dir, - ref_name: Some(branch_ref.into()), - sha: head_sha.into(), - is_main: true, - }; + let fs = self.fs.clone(); + let common_dir_path = self.common_dir_path.clone(); + let executor = self.executor.clone(); + + async move { + executor.simulate_random_delay().await; + + let (main_worktree, refs) = fs.with_git_state(&common_dir_path, false, |state| { + let work_dir = common_dir_path + .parent() + .map(PathBuf::from) + .unwrap_or_else(|| common_dir_path.clone()); + let head_sha = state + .refs + .get("HEAD") + .cloned() + .unwrap_or_else(|| "0000000".to_string()); + let branch_ref = state + .current_branch_name + .as_ref() + .map(|name| format!("refs/heads/{name}")) + .unwrap_or_else(|| "refs/heads/main".to_string()); + let main_wt = Worktree { + path: work_dir, + ref_name: Some(branch_ref.into()), + sha: head_sha.into(), + is_main: true, + }; + (main_wt, state.refs.clone()) + })?; + let mut all = vec![main_worktree]; - all.extend(state.worktrees.iter().cloned()); + + let 
worktrees_dir = common_dir_path.join("worktrees"); + if let Ok(mut entries) = fs.read_dir(&worktrees_dir).await { + use futures::StreamExt; + while let Some(Ok(entry_path)) = entries.next().await { + let head_content = match fs.load(&entry_path.join("HEAD")).await { + Ok(content) => content, + Err(_) => continue, + }; + let gitdir_content = match fs.load(&entry_path.join("gitdir")).await { + Ok(content) => content, + Err(_) => continue, + }; + + let ref_name = head_content + .strip_prefix("ref: ") + .map(|s| s.trim().to_string()); + let sha = ref_name + .as_ref() + .and_then(|r| refs.get(r)) + .cloned() + .unwrap_or_else(|| head_content.trim().to_string()); + + let worktree_path = PathBuf::from(gitdir_content.trim()) + .parent() + .map(PathBuf::from) + .unwrap_or_default(); + + all.push(Worktree { + path: worktree_path, + ref_name: ref_name.map(Into::into), + sha: sha.into(), + is_main: false, + }); + } + } + Ok(all) - }) + } + .boxed() } fn create_worktree( &self, - branch_name: String, + branch_name: Option, path: PathBuf, from_commit: Option, ) -> BoxFuture<'_, Result<()>> { let fs = self.fs.clone(); let executor = self.executor.clone(); let dot_git_path = self.dot_git_path.clone(); + let common_dir_path = self.common_dir_path.clone(); async move { executor.simulate_random_delay().await; - // Check for simulated error before any side effects + // Check for simulated error and duplicate branch before any side effects. fs.with_git_state(&dot_git_path, false, |state| { if let Some(message) = &state.simulated_create_worktree_error { anyhow::bail!("{message}"); } + if let Some(ref name) = branch_name { + if state.branches.contains(name) { + bail!("a branch named '{}' already exists", name); + } + } Ok(()) })??; - // Create directory before updating state so state is never - // inconsistent with the filesystem + + // Create the worktree checkout directory. 
fs.create_dir(&path).await?; - fs.with_git_state(&dot_git_path, true, { - let path = path.clone(); - move |state| { - if state.branches.contains(&branch_name) { - bail!("a branch named '{}' already exists", branch_name); - } + + // Create .git/worktrees// directory with HEAD, commondir, gitdir. + let worktree_entry_name = branch_name + .as_deref() + .unwrap_or_else(|| path.file_name().unwrap().to_str().unwrap()); + let worktrees_entry_dir = common_dir_path.join("worktrees").join(worktree_entry_name); + fs.create_dir(&worktrees_entry_dir).await?; + + let sha = from_commit.unwrap_or_else(|| "fake-sha".to_string()); + let head_content = if let Some(ref branch_name) = branch_name { + let ref_name = format!("refs/heads/{branch_name}"); + format!("ref: {ref_name}") + } else { + sha.clone() + }; + fs.write_file_internal( + worktrees_entry_dir.join("HEAD"), + head_content.into_bytes(), + false, + )?; + fs.write_file_internal( + worktrees_entry_dir.join("commondir"), + common_dir_path.to_string_lossy().into_owned().into_bytes(), + false, + )?; + let worktree_dot_git = path.join(".git"); + fs.write_file_internal( + worktrees_entry_dir.join("gitdir"), + worktree_dot_git.to_string_lossy().into_owned().into_bytes(), + false, + )?; + + // Create .git file in the worktree checkout. + fs.write_file_internal( + &worktree_dot_git, + format!("gitdir: {}", worktrees_entry_dir.display()).into_bytes(), + false, + )?; + + // Update git state: add ref and branch. 
+ fs.with_git_state(&dot_git_path, true, move |state| { + if let Some(branch_name) = branch_name { let ref_name = format!("refs/heads/{branch_name}"); - let sha = from_commit.unwrap_or_else(|| "fake-sha".to_string()); - state.refs.insert(ref_name.clone(), sha.clone()); - state.worktrees.push(Worktree { - path, - ref_name: Some(ref_name.into()), - sha: sha.into(), - is_main: false, - }); + state.refs.insert(ref_name, sha); state.branches.insert(branch_name); - Ok::<(), anyhow::Error>(()) } + Ok::<(), anyhow::Error>(()) })??; Ok(()) } @@ -485,20 +621,23 @@ impl GitRepository for FakeGitRepository { fn remove_worktree(&self, path: PathBuf, _force: bool) -> BoxFuture<'_, Result<()>> { let fs = self.fs.clone(); let executor = self.executor.clone(); - let dot_git_path = self.dot_git_path.clone(); + let common_dir_path = self.common_dir_path.clone(); async move { executor.simulate_random_delay().await; - // Validate the worktree exists in state before touching the filesystem - fs.with_git_state(&dot_git_path, false, { - let path = path.clone(); - move |state| { - if !state.worktrees.iter().any(|w| w.path == path) { - bail!("no worktree found at path: {}", path.display()); - } - Ok(()) - } - })??; - // Now remove the directory + + // Read the worktree's .git file to find its entry directory. + let dot_git_file = path.join(".git"); + let content = fs + .load(&dot_git_file) + .await + .with_context(|| format!("no worktree found at path: {}", path.display()))?; + let gitdir = content + .strip_prefix("gitdir:") + .context("invalid .git file in worktree")? + .trim(); + let worktree_entry_dir = PathBuf::from(gitdir); + + // Remove the worktree checkout directory. 
fs.remove_dir( &path, RemoveOptions { @@ -507,11 +646,21 @@ impl GitRepository for FakeGitRepository { }, ) .await?; - // Update state - fs.with_git_state(&dot_git_path, true, move |state| { - state.worktrees.retain(|worktree| worktree.path != path); - Ok::<(), anyhow::Error>(()) - })??; + + // Remove the .git/worktrees// directory. + fs.remove_dir( + &worktree_entry_dir, + RemoveOptions { + recursive: true, + ignore_if_not_exists: false, + }, + ) + .await?; + + // Emit a git event on the main .git directory so the scanner + // notices the change. + fs.with_git_state(&common_dir_path, true, |_| {})?; + Ok(()) } .boxed() @@ -520,20 +669,23 @@ impl GitRepository for FakeGitRepository { fn rename_worktree(&self, old_path: PathBuf, new_path: PathBuf) -> BoxFuture<'_, Result<()>> { let fs = self.fs.clone(); let executor = self.executor.clone(); - let dot_git_path = self.dot_git_path.clone(); + let common_dir_path = self.common_dir_path.clone(); async move { executor.simulate_random_delay().await; - // Validate the worktree exists in state before touching the filesystem - fs.with_git_state(&dot_git_path, false, { - let old_path = old_path.clone(); - move |state| { - if !state.worktrees.iter().any(|w| w.path == old_path) { - bail!("no worktree found at path: {}", old_path.display()); - } - Ok(()) - } - })??; - // Now move the directory + + // Read the worktree's .git file to find its entry directory. + let dot_git_file = old_path.join(".git"); + let content = fs + .load(&dot_git_file) + .await + .with_context(|| format!("no worktree found at path: {}", old_path.display()))?; + let gitdir = content + .strip_prefix("gitdir:") + .context("invalid .git file in worktree")? + .trim(); + let worktree_entry_dir = PathBuf::from(gitdir); + + // Move the worktree checkout directory. 
fs.rename( &old_path, &new_path, @@ -544,16 +696,27 @@ impl GitRepository for FakeGitRepository { }, ) .await?; - // Update state - fs.with_git_state(&dot_git_path, true, move |state| { - let worktree = state - .worktrees - .iter_mut() - .find(|worktree| worktree.path == old_path) - .expect("worktree was validated above"); - worktree.path = new_path; - Ok::<(), anyhow::Error>(()) - })??; + + // Update the gitdir file in .git/worktrees// to point to the + // new location. + let new_dot_git = new_path.join(".git"); + fs.write_file_internal( + worktree_entry_dir.join("gitdir"), + new_dot_git.to_string_lossy().into_owned().into_bytes(), + false, + )?; + + // Update the .git file in the moved worktree checkout. + fs.write_file_internal( + &new_dot_git, + format!("gitdir: {}", worktree_entry_dir.display()).into_bytes(), + false, + )?; + + // Emit a git event on the main .git directory so the scanner + // notices the change. + fs.with_git_state(&common_dir_path, true, |_| {})?; + Ok(()) } .boxed() @@ -722,11 +885,30 @@ impl GitRepository for FakeGitRepository { &self, _message: gpui::SharedString, _name_and_email: Option<(gpui::SharedString, gpui::SharedString)>, - _options: CommitOptions, + options: CommitOptions, _askpass: AskPassDelegate, _env: Arc>, ) -> BoxFuture<'_, Result<()>> { - async { Ok(()) }.boxed() + self.with_state_async(true, move |state| { + if !options.allow_empty && !options.amend && state.index_contents == state.head_contents + { + anyhow::bail!("nothing to commit (use allow_empty to create an empty commit)"); + } + + let old_sha = state.refs.get("HEAD").cloned().unwrap_or_default(); + state.commit_history.push(FakeCommitSnapshot { + head_contents: state.head_contents.clone(), + index_contents: state.index_contents.clone(), + sha: old_sha, + }); + + state.head_contents = state.index_contents.clone(); + + let new_sha = format!("fake-commit-{}", state.commit_history.len()); + state.refs.insert("HEAD".into(), new_sha); + + Ok(()) + }) } fn run_hook( @@ 
-960,10 +1142,88 @@ impl GitRepository for FakeGitRepository { fn diff_checkpoints( &self, - _base_checkpoint: GitRepositoryCheckpoint, - _target_checkpoint: GitRepositoryCheckpoint, + base_checkpoint: GitRepositoryCheckpoint, + target_checkpoint: GitRepositoryCheckpoint, ) -> BoxFuture<'_, Result> { - unimplemented!() + let executor = self.executor.clone(); + let checkpoints = self.checkpoints.clone(); + async move { + executor.simulate_random_delay().await; + let checkpoints = checkpoints.lock(); + let base = checkpoints + .get(&base_checkpoint.commit_sha) + .context(format!( + "invalid base checkpoint: {}", + base_checkpoint.commit_sha + ))?; + let target = checkpoints + .get(&target_checkpoint.commit_sha) + .context(format!( + "invalid target checkpoint: {}", + target_checkpoint.commit_sha + ))?; + + fn collect_files( + entry: &FakeFsEntry, + prefix: String, + out: &mut std::collections::BTreeMap, + ) { + match entry { + FakeFsEntry::File { content, .. } => { + out.insert(prefix, String::from_utf8_lossy(content).into_owned()); + } + FakeFsEntry::Dir { entries, .. } => { + for (name, child) in entries { + let path = if prefix.is_empty() { + name.clone() + } else { + format!("{prefix}/{name}") + }; + collect_files(child, path, out); + } + } + FakeFsEntry::Symlink { .. 
} => {} + } + } + + let mut base_files = std::collections::BTreeMap::new(); + let mut target_files = std::collections::BTreeMap::new(); + collect_files(base, String::new(), &mut base_files); + collect_files(target, String::new(), &mut target_files); + + let all_paths: std::collections::BTreeSet<&String> = + base_files.keys().chain(target_files.keys()).collect(); + + let mut diff = String::new(); + for path in all_paths { + match (base_files.get(path), target_files.get(path)) { + (Some(base_content), Some(target_content)) + if base_content != target_content => + { + diff.push_str(&format!("diff --git a/{path} b/{path}\n")); + diff.push_str(&format!("--- a/{path}\n")); + diff.push_str(&format!("+++ b/{path}\n")); + for line in base_content.lines() { + diff.push_str(&format!("-{line}\n")); + } + for line in target_content.lines() { + diff.push_str(&format!("+{line}\n")); + } + } + (Some(_), None) => { + diff.push_str(&format!("diff --git a/{path} /dev/null\n")); + diff.push_str("deleted file\n"); + } + (None, Some(_)) => { + diff.push_str(&format!("diff --git /dev/null b/{path}\n")); + diff.push_str("new file\n"); + } + _ => {} + } + } + Ok(diff) + } + .boxed() } fn default_branch( @@ -1032,6 +1292,24 @@ impl GitRepository for FakeGitRepository { anyhow::bail!("commit_data_reader not supported for FakeGitRepository") } + fn update_ref(&self, ref_name: String, commit: String) -> BoxFuture<'_, Result<()>> { + self.with_state_async(true, move |state| { + state.refs.insert(ref_name, commit); + Ok(()) + }) + } + + fn delete_ref(&self, ref_name: String) -> BoxFuture<'_, Result<()>> { + self.with_state_async(true, move |state| { + state.refs.remove(&ref_name); + Ok(()) + }) + } + + fn repair_worktrees(&self) -> BoxFuture<'_, Result<()>> { + async { Ok(()) }.boxed() + } + fn set_trusted(&self, trusted: bool) { self.is_trusted .store(trusted, std::sync::atomic::Ordering::Release); diff --git a/crates/fs/src/fs.rs b/crates/fs/src/fs.rs index 
99efafadc0421791c526bfe80a751d186de4ff8a..a26abb81255003e4059f9bcc8a68aa3c6212a73a 100644 --- a/crates/fs/src/fs.rs +++ b/crates/fs/src/fs.rs @@ -57,7 +57,7 @@ use collections::{BTreeMap, btree_map}; use fake_git_repo::FakeGitRepositoryState; #[cfg(feature = "test-support")] use git::{ - repository::{InitialGraphCommitData, RepoPath, repo_path}, + repository::{InitialGraphCommitData, RepoPath, Worktree, repo_path}, status::{FileStatus, StatusCode, TrackedStatus, UnmergedStatus}, }; #[cfg(feature = "test-support")] @@ -1892,11 +1892,15 @@ impl FakeFs { anyhow::bail!("gitfile points to a non-directory") }; let common_dir = if let Some(child) = entries.get("commondir") { - Path::new( - std::str::from_utf8(child.file_content("commondir".as_ref())?) - .context("commondir content")?, - ) - .to_owned() + let raw = std::str::from_utf8(child.file_content("commondir".as_ref())?) + .context("commondir content")? + .trim(); + let raw_path = Path::new(raw); + if raw_path.is_relative() { + normalize_path(&canonical_path.join(raw_path)) + } else { + raw_path.to_owned() + } } else { canonical_path.clone() }; @@ -1960,6 +1964,116 @@ impl FakeFs { .unwrap(); } + pub async fn add_linked_worktree_for_repo( + &self, + dot_git: &Path, + emit_git_event: bool, + worktree: Worktree, + ) { + let ref_name = worktree + .ref_name + .as_ref() + .expect("linked worktree must have a ref_name"); + let branch_name = ref_name + .strip_prefix("refs/heads/") + .unwrap_or(ref_name.as_ref()); + + // Create ref in git state. + self.with_git_state(dot_git, false, |state| { + state + .refs + .insert(ref_name.to_string(), worktree.sha.to_string()); + }) + .unwrap(); + + // Create .git/worktrees// directory with HEAD, commondir, and gitdir. 
+ let worktrees_entry_dir = dot_git.join("worktrees").join(branch_name); + self.create_dir(&worktrees_entry_dir).await.unwrap(); + + self.write_file_internal( + worktrees_entry_dir.join("HEAD"), + format!("ref: {ref_name}").into_bytes(), + false, + ) + .unwrap(); + + self.write_file_internal( + worktrees_entry_dir.join("commondir"), + dot_git.to_string_lossy().into_owned().into_bytes(), + false, + ) + .unwrap(); + + let worktree_dot_git = worktree.path.join(".git"); + self.write_file_internal( + worktrees_entry_dir.join("gitdir"), + worktree_dot_git.to_string_lossy().into_owned().into_bytes(), + false, + ) + .unwrap(); + + // Create the worktree checkout directory with a .git file pointing back. + self.create_dir(&worktree.path).await.unwrap(); + + self.write_file_internal( + &worktree_dot_git, + format!("gitdir: {}", worktrees_entry_dir.display()).into_bytes(), + false, + ) + .unwrap(); + + if emit_git_event { + self.with_git_state(dot_git, true, |_| {}).unwrap(); + } + } + + pub async fn remove_worktree_for_repo( + &self, + dot_git: &Path, + emit_git_event: bool, + ref_name: &str, + ) { + let branch_name = ref_name.strip_prefix("refs/heads/").unwrap_or(ref_name); + let worktrees_entry_dir = dot_git.join("worktrees").join(branch_name); + + // Read gitdir to find the worktree checkout path. + let gitdir_content = self + .load_internal(worktrees_entry_dir.join("gitdir")) + .await + .unwrap(); + let gitdir_str = String::from_utf8(gitdir_content).unwrap(); + let worktree_path = PathBuf::from(gitdir_str.trim()) + .parent() + .map(PathBuf::from) + .unwrap_or_default(); + + // Remove the worktree checkout directory. + self.remove_dir( + &worktree_path, + RemoveOptions { + recursive: true, + ignore_if_not_exists: true, + }, + ) + .await + .unwrap(); + + // Remove the .git/worktrees// directory. 
+ self.remove_dir( + &worktrees_entry_dir, + RemoveOptions { + recursive: true, + ignore_if_not_exists: false, + }, + ) + .await + .unwrap(); + + if emit_git_event { + self.with_git_state(dot_git, true, |_| {}).unwrap(); + } + } + pub fn set_unmerged_paths_for_repo( &self, dot_git: &Path, diff --git a/crates/fs/tests/integration/fake_git_repo.rs b/crates/fs/tests/integration/fake_git_repo.rs index e327f92e996bfa0e89cc60a0a9c0d919bec8bc47..f4192a22bb42f88f8769ef59f817b2bf2a288fb9 100644 --- a/crates/fs/tests/integration/fake_git_repo.rs +++ b/crates/fs/tests/integration/fake_git_repo.rs @@ -24,7 +24,7 @@ async fn test_fake_worktree_lifecycle(cx: &mut TestAppContext) { // Create a worktree let worktree_1_dir = worktrees_dir.join("feature-branch"); repo.create_worktree( - "feature-branch".to_string(), + Some("feature-branch".to_string()), worktree_1_dir.clone(), Some("abc123".to_string()), ) @@ -47,9 +47,13 @@ async fn test_fake_worktree_lifecycle(cx: &mut TestAppContext) { // Create a second worktree (without explicit commit) let worktree_2_dir = worktrees_dir.join("bugfix-branch"); - repo.create_worktree("bugfix-branch".to_string(), worktree_2_dir.clone(), None) - .await - .unwrap(); + repo.create_worktree( + Some("bugfix-branch".to_string()), + worktree_2_dir.clone(), + None, + ) + .await + .unwrap(); let worktrees = repo.worktrees().await.unwrap(); assert_eq!(worktrees.len(), 3); @@ -155,7 +159,10 @@ async fn test_checkpoints(executor: BackgroundExecutor) { .unwrap() ); - repository.restore_checkpoint(checkpoint_1).await.unwrap(); + repository + .restore_checkpoint(checkpoint_1.clone()) + .await + .unwrap(); assert_eq!( fs.files_with_contents(Path::new("")), [ @@ -164,4 +171,22 @@ async fn test_checkpoints(executor: BackgroundExecutor) { (Path::new(path!("/foo/b")).into(), b"ipsum".into()) ] ); + + // diff_checkpoints: identical checkpoints produce empty diff + let diff = repository + .diff_checkpoints(checkpoint_2.clone(), checkpoint_3.clone()) + .await + 
.unwrap(); + assert!( + diff.is_empty(), + "identical checkpoints should produce empty diff" + ); + + // diff_checkpoints: different checkpoints produce non-empty diff + let diff = repository + .diff_checkpoints(checkpoint_1.clone(), checkpoint_2.clone()) + .await + .unwrap(); + assert!(diff.contains("b"), "diff should mention changed file 'b'"); + assert!(diff.contains("c"), "diff should mention added file 'c'"); } diff --git a/crates/fuzzy/src/char_bag.rs b/crates/fuzzy/src/char_bag.rs index 13b00816ed0141117fb6d5ac9265e4b82c7aa57d..1821a63793337862d9d6ad01a6a42072588d7be5 100644 --- a/crates/fuzzy/src/char_bag.rs +++ b/crates/fuzzy/src/char_bag.rs @@ -1,5 +1,9 @@ use std::iter::FromIterator; +pub fn simple_lowercase(c: char) -> char { + c.to_lowercase().next().unwrap_or(c) +} + #[derive(Copy, Clone, Debug, Default, PartialEq, Eq, Hash)] pub struct CharBag(u64); @@ -9,7 +13,7 @@ impl CharBag { } fn insert(&mut self, c: char) { - let c = c.to_ascii_lowercase(); + let c = simple_lowercase(c); if c.is_ascii_lowercase() { let mut count = self.0; let idx = c as u8 - b'a'; diff --git a/crates/fuzzy/src/matcher.rs b/crates/fuzzy/src/matcher.rs index 782c9caca832d81fb6e4bce8f49b4f310664b292..102708d2fad6b560b1a606c34246033587affdda 100644 --- a/crates/fuzzy/src/matcher.rs +++ b/crates/fuzzy/src/matcher.rs @@ -1,10 +1,9 @@ use std::{ borrow::Borrow, - collections::BTreeMap, sync::atomic::{self, AtomicBool}, }; -use crate::CharBag; +use crate::{CharBag, char_bag::simple_lowercase}; const BASE_DISTANCE_PENALTY: f64 = 0.6; const ADDITIONAL_DISTANCE_PENALTY: f64 = 0.05; @@ -69,7 +68,6 @@ impl<'a> Matcher<'a> { { let mut candidate_chars = Vec::new(); let mut lowercase_candidate_chars = Vec::new(); - let mut extra_lowercase_chars = BTreeMap::new(); for candidate in candidates { if !candidate.borrow().has_chars(self.query_char_bag) { @@ -82,14 +80,9 @@ impl<'a> Matcher<'a> { candidate_chars.clear(); lowercase_candidate_chars.clear(); - extra_lowercase_chars.clear(); - for (i, c) 
in candidate.borrow().candidate_chars().enumerate() { + for c in candidate.borrow().candidate_chars() { candidate_chars.push(c); - let mut char_lowercased = c.to_lowercase().collect::>(); - if char_lowercased.len() > 1 { - extra_lowercase_chars.insert(i, char_lowercased.len() - 1); - } - lowercase_candidate_chars.append(&mut char_lowercased); + lowercase_candidate_chars.push(simple_lowercase(c)); } if !self.find_last_positions(lowercase_prefix, &lowercase_candidate_chars) { @@ -108,7 +101,6 @@ impl<'a> Matcher<'a> { &lowercase_candidate_chars, prefix, lowercase_prefix, - &extra_lowercase_chars, ); if score > 0.0 { @@ -146,7 +138,6 @@ impl<'a> Matcher<'a> { path_lowercased: &[char], prefix: &[char], lowercase_prefix: &[char], - extra_lowercase_chars: &BTreeMap, ) -> f64 { let score = self.recursive_score_match( path, @@ -156,7 +147,6 @@ impl<'a> Matcher<'a> { 0, 0, self.query.len() as f64, - extra_lowercase_chars, ) * self.query.len() as f64; if score <= 0.0 { @@ -201,7 +191,6 @@ impl<'a> Matcher<'a> { query_idx: usize, path_idx: usize, cur_score: f64, - extra_lowercase_chars: &BTreeMap, ) -> f64 { if query_idx == self.query.len() { return 1.0; @@ -228,13 +217,6 @@ impl<'a> Matcher<'a> { let mut last_slash = 0; for j in path_idx..=safe_limit { - let extra_lowercase_chars_count = extra_lowercase_chars - .iter() - .take_while(|&(&i, _)| i < j) - .map(|(_, increment)| increment) - .sum::(); - let j_regular = j - extra_lowercase_chars_count; - let path_char = if j < prefix.len() { lowercase_prefix[j] } else { @@ -247,20 +229,20 @@ impl<'a> Matcher<'a> { let is_path_sep = path_char == '/'; if query_idx == 0 && is_path_sep { - last_slash = j_regular; + last_slash = j; } let need_to_score = query_char == path_char || (is_path_sep && query_char == '_'); if need_to_score { - let curr = match prefix.get(j_regular) { + let curr = match prefix.get(j) { Some(&curr) => curr, - None => path[j_regular - prefix.len()], + None => path[j - prefix.len()], }; let mut char_score = 1.0; 
if j > path_idx { - let last = match prefix.get(j_regular - 1) { + let last = match prefix.get(j - 1) { Some(&last) => last, - None => path[j_regular - 1 - prefix.len()], + None => path[j - 1 - prefix.len()], }; if last == '/' { @@ -316,12 +298,11 @@ impl<'a> Matcher<'a> { query_idx + 1, j + 1, next_score, - extra_lowercase_chars, ) * multiplier; if new_score > score { score = new_score; - best_position = j_regular; + best_position = j; // Optimization: can't score better than 1. if new_score == 1.0 { break; @@ -469,12 +450,12 @@ mod tests { assert_eq!( match_single_path_query("İo/oluş", false, &mixed_unicode_paths), - vec![("İolu/oluş", vec![0, 2, 4, 6, 8, 10, 12])] + vec![("İolu/oluş", vec![0, 2, 5, 6, 7, 8, 9])] ); assert_eq!( match_single_path_query("İst/code", false, &mixed_unicode_paths), - vec![("İstanbul/code", vec![0, 2, 4, 6, 8, 10, 12, 14])] + vec![("İstanbul/code", vec![0, 2, 3, 9, 10, 11, 12, 13])] ); assert_eq!( @@ -536,12 +517,60 @@ mod tests { ); } + #[test] + fn test_positions_are_valid_char_boundaries_with_expanding_lowercase() { + // İ (U+0130) lowercases to "i\u{307}" (2 chars) under full case folding. + // With simple case mapping (used by this matcher), İ → 'i' (1 char), + // so positions remain valid byte boundaries. + let paths = vec!["İstanbul/code.rs", "aİbİc/dİeİf.txt", "src/İmport/İndex.ts"]; + + for query in &["code", "İst", "dİe", "İndex", "İmport", "abcdef"] { + let results = match_single_path_query(query, false, &paths); + for (path, positions) in &results { + for &pos in positions { + assert!( + path.is_char_boundary(pos), + "Position {pos} is not a valid char boundary in path {path:?} \ + (query: {query:?}, all positions: {positions:?})" + ); + } + } + } + } + + #[test] + fn test_positions_valid_with_various_multibyte_chars() { + // German ß uppercases to SS but lowercases to itself — no expansion. + // Armenian ligatures and other characters that could expand under full + // case folding should still produce valid byte boundaries. 
+ let paths = vec![ + "straße/config.rs", + "Straße/München/file.txt", + "file/path.rs", // fi (U+FB01, fi ligature) + "ffoo/bar.txt", // ff (U+FB00, ff ligature) + "aÇbŞc/dÖeÜf.txt", // Turkish chars that don't expand + ]; + + for query in &["config", "Mün", "file", "bar", "abcdef", "straße", "ÇŞ"] { + let results = match_single_path_query(query, false, &paths); + for (path, positions) in &results { + for &pos in positions { + assert!( + path.is_char_boundary(pos), + "Position {pos} is not a valid char boundary in path {path:?} \ + (query: {query:?}, all positions: {positions:?})" + ); + } + } + } + } + fn match_single_path_query<'a>( query: &str, smart_case: bool, paths: &[&'a str], ) -> Vec<(&'a str, Vec)> { - let lowercase_query = query.to_lowercase().chars().collect::>(); + let lowercase_query = query.chars().map(simple_lowercase).collect::>(); let query = query.chars().collect::>(); let query_chars = CharBag::from(&lowercase_query[..]); @@ -551,7 +580,7 @@ mod tests { .collect::>(); let mut path_entries = Vec::new(); for (i, path) in paths.iter().enumerate() { - let lowercase_path = path.to_lowercase().chars().collect::>(); + let lowercase_path: Vec = path.chars().map(simple_lowercase).collect(); let char_bag = CharBag::from(lowercase_path.as_slice()); path_entries.push(PathMatchCandidate { is_dir: false, diff --git a/crates/fuzzy/src/paths.rs b/crates/fuzzy/src/paths.rs index cce0e082840c4cd05d6e2b21eac0073d3eb7700f..2f92f05b96a3be2da7053365d8a7c53722db6ab8 100644 --- a/crates/fuzzy/src/paths.rs +++ b/crates/fuzzy/src/paths.rs @@ -10,6 +10,7 @@ use util::{paths::PathStyle, rel_path::RelPath}; use crate::{ CharBag, + char_bag::simple_lowercase, matcher::{MatchCandidate, Matcher}, }; @@ -94,7 +95,7 @@ pub fn match_fixed_path_set( max_results: usize, path_style: PathStyle, ) -> Vec { - let lowercase_query = query.to_lowercase().chars().collect::>(); + let lowercase_query = query.chars().map(simple_lowercase).collect::>(); let query = query.chars().collect::>(); 
let query_char_bag = CharBag::from(&lowercase_query[..]); @@ -110,7 +111,7 @@ pub fn match_fixed_path_set( path_prefix_chars.extend(path_style.primary_separator().chars()); let lowercase_pfx = path_prefix_chars .iter() - .map(|c| c.to_ascii_lowercase()) + .map(|c| simple_lowercase(*c)) .collect::>(); (worktree_root_name, path_prefix_chars, lowercase_pfx) @@ -171,7 +172,7 @@ pub async fn match_path_sets<'a, Set: PathMatchCandidateSet<'a>>( let lowercase_query = query .iter() - .map(|query| query.to_ascii_lowercase()) + .map(|query| simple_lowercase(*query)) .collect::>(); let query = &query; @@ -217,7 +218,7 @@ pub async fn match_path_sets<'a, Set: PathMatchCandidateSet<'a>>( } let lowercase_prefix = prefix .iter() - .map(|c| c.to_ascii_lowercase()) + .map(|c| simple_lowercase(*c)) .collect::>(); matcher.match_candidates( &prefix, diff --git a/crates/fuzzy/src/strings.rs b/crates/fuzzy/src/strings.rs index 54539840cfb0ca251428d9f78d5d134f16afdf4c..fb191bd9dcadd81a5a9890032ef8b185cdf7342e 100644 --- a/crates/fuzzy/src/strings.rs +++ b/crates/fuzzy/src/strings.rs @@ -1,5 +1,6 @@ use crate::{ CharBag, + char_bag::simple_lowercase, matcher::{MatchCandidate, Matcher}, }; use gpui::BackgroundExecutor; @@ -141,7 +142,7 @@ where .collect(); } - let lowercase_query = query.to_lowercase().chars().collect::>(); + let lowercase_query = query.chars().map(simple_lowercase).collect::>(); let query = query.chars().collect::>(); let lowercase_query = &lowercase_query; diff --git a/crates/git/src/repository.rs b/crates/git/src/repository.rs index b03fe1b0c63904bfc751ab7946f92a7c8595db00..c42d2e28cf041e40404c1b8276ddcf5d10ca5f01 100644 --- a/crates/git/src/repository.rs +++ b/crates/git/src/repository.rs @@ -329,6 +329,7 @@ impl Upstream { pub struct CommitOptions { pub amend: bool, pub signoff: bool, + pub allow_empty: bool, } #[derive(Clone, Copy, Debug, Hash, PartialEq, Eq)] @@ -715,7 +716,7 @@ pub trait GitRepository: Send + Sync { fn create_worktree( &self, - branch_name: String, 
+ branch_name: Option, path: PathBuf, from_commit: Option, ) -> BoxFuture<'_, Result<()>>; @@ -916,6 +917,12 @@ pub trait GitRepository: Send + Sync { fn commit_data_reader(&self) -> Result; + fn update_ref(&self, ref_name: String, commit: String) -> BoxFuture<'_, Result<()>>; + + fn delete_ref(&self, ref_name: String) -> BoxFuture<'_, Result<()>>; + + fn repair_worktrees(&self) -> BoxFuture<'_, Result<()>>; + fn set_trusted(&self, trusted: bool); fn is_trusted(&self) -> bool; } @@ -1660,19 +1667,20 @@ impl GitRepository for RealGitRepository { fn create_worktree( &self, - branch_name: String, + branch_name: Option, path: PathBuf, from_commit: Option, ) -> BoxFuture<'_, Result<()>> { let git_binary = self.git_binary(); - let mut args = vec![ - OsString::from("worktree"), - OsString::from("add"), - OsString::from("-b"), - OsString::from(branch_name.as_str()), - OsString::from("--"), - OsString::from(path.as_os_str()), - ]; + let mut args = vec![OsString::from("worktree"), OsString::from("add")]; + if let Some(branch_name) = &branch_name { + args.push(OsString::from("-b")); + args.push(OsString::from(branch_name.as_str())); + } else { + args.push(OsString::from("--detach")); + } + args.push(OsString::from("--")); + args.push(OsString::from(path.as_os_str())); if let Some(from_commit) = from_commit { args.push(OsString::from(from_commit)); } else { @@ -2165,6 +2173,10 @@ impl GitRepository for RealGitRepository { cmd.arg("--signoff"); } + if options.allow_empty { + cmd.arg("--allow-empty"); + } + if let Some((name, email)) = name_and_email { cmd.arg("--author").arg(&format!("{name} <{email}>")); } @@ -2176,6 +2188,39 @@ impl GitRepository for RealGitRepository { .boxed() } + fn update_ref(&self, ref_name: String, commit: String) -> BoxFuture<'_, Result<()>> { + let git_binary = self.git_binary(); + self.executor + .spawn(async move { + let args: Vec = vec!["update-ref".into(), ref_name.into(), commit.into()]; + git_binary?.run(&args).await?; + Ok(()) + }) + .boxed() + 
} + + fn delete_ref(&self, ref_name: String) -> BoxFuture<'_, Result<()>> { + let git_binary = self.git_binary(); + self.executor + .spawn(async move { + let args: Vec = vec!["update-ref".into(), "-d".into(), ref_name.into()]; + git_binary?.run(&args).await?; + Ok(()) + }) + .boxed() + } + + fn repair_worktrees(&self) -> BoxFuture<'_, Result<()>> { + let git_binary = self.git_binary(); + self.executor + .spawn(async move { + let args: Vec = vec!["worktree".into(), "repair".into()]; + git_binary?.run(&args).await?; + Ok(()) + }) + .boxed() + } + fn push( &self, branch_name: String, @@ -4009,7 +4054,7 @@ mod tests { // Create a new worktree repo.create_worktree( - "test-branch".to_string(), + Some("test-branch".to_string()), worktree_path.clone(), Some("HEAD".to_string()), ) @@ -4068,7 +4113,7 @@ mod tests { // Create a worktree let worktree_path = worktrees_dir.join("worktree-to-remove"); repo.create_worktree( - "to-remove".to_string(), + Some("to-remove".to_string()), worktree_path.clone(), Some("HEAD".to_string()), ) @@ -4092,7 +4137,7 @@ mod tests { // Create a worktree let worktree_path = worktrees_dir.join("dirty-wt"); repo.create_worktree( - "dirty-wt".to_string(), + Some("dirty-wt".to_string()), worktree_path.clone(), Some("HEAD".to_string()), ) @@ -4162,7 +4207,7 @@ mod tests { // Create a worktree let old_path = worktrees_dir.join("old-worktree-name"); repo.create_worktree( - "old-name".to_string(), + Some("old-name".to_string()), old_path.clone(), Some("HEAD".to_string()), ) diff --git a/crates/git_graph/Cargo.toml b/crates/git_graph/Cargo.toml index cc3374a85932435d010daabdfe0e4b4eef628de6..e9e31a8361e367275c994e125ae6e04cbd652fc3 100644 --- a/crates/git_graph/Cargo.toml +++ b/crates/git_graph/Cargo.toml @@ -24,7 +24,6 @@ anyhow.workspace = true collections.workspace = true db.workspace = true editor.workspace = true -feature_flags.workspace = true git.workspace = true git_ui.workspace = true gpui.workspace = true diff --git 
a/crates/git_graph/src/git_graph.rs b/crates/git_graph/src/git_graph.rs index a66e840b2f41405b5c76f3999ea14414daa19d39..aa5f6bc6e1293cfd057baa0c5e9f77819da71086 100644 --- a/crates/git_graph/src/git_graph.rs +++ b/crates/git_graph/src/git_graph.rs @@ -1,6 +1,5 @@ use collections::{BTreeMap, HashMap, IndexSet}; use editor::Editor; -use feature_flags::{FeatureFlagAppExt as _, GitGraphFeatureFlag}; use git::{ BuildCommitPermalinkParams, GitHostingProviderRegistry, GitRemote, Oid, ParsedGitRemote, parse_git_remote_url, @@ -26,7 +25,7 @@ use project::git_store::{ }; use search::{ SearchOption, SearchOptions, SearchSource, SelectNextMatch, SelectPreviousMatch, - ToggleCaseSensitive, + ToggleCaseSensitive, buffer_search, }; use settings::Settings; use smallvec::{SmallVec, smallvec}; @@ -42,8 +41,10 @@ use theme_settings::ThemeSettings; use time::{OffsetDateTime, UtcOffset, format_description::BorrowedFormatItem}; use ui::{ ButtonLike, Chip, ColumnWidthConfig, CommonAnimationExt as _, ContextMenu, DiffStat, Divider, - HighlightedLabel, RedistributableColumnsState, ScrollableHandle, Table, TableInteractionState, - TableResizeBehavior, Tooltip, WithScrollbar, prelude::*, + HeaderResizeInfo, HighlightedLabel, RedistributableColumnsState, ScrollableHandle, Table, + TableInteractionState, TableRenderContext, TableResizeBehavior, Tooltip, WithScrollbar, + bind_redistributable_columns, prelude::*, render_redistributable_columns_resize_handles, + render_table_header, table_row::TableRow, }; use workspace::{ Workspace, @@ -274,6 +275,8 @@ actions!( [ /// Opens the commit view for the selected commit. OpenCommitView, + /// Focuses the search field. 
+ FocusSearch, ] ); @@ -730,8 +733,7 @@ pub fn init(cx: &mut App) { cx.observe_new(|workspace: &mut workspace::Workspace, _, _| { workspace.register_action_renderer(|div, workspace, _, cx| { div.when( - workspace.project().read(cx).active_repository(cx).is_some() - && cx.has_flag::(), + workspace.project().read(cx).active_repository(cx).is_some(), |div| { let workspace = workspace.weak_handle(); @@ -833,8 +835,8 @@ pub fn init(cx: &mut App) { .detach(); } -fn lane_center_x(bounds: Bounds, lane: f32, horizontal_scroll_offset: Pixels) -> Pixels { - bounds.origin.x + LEFT_PADDING + lane * LANE_WIDTH + LANE_WIDTH / 2.0 - horizontal_scroll_offset +fn lane_center_x(bounds: Bounds, lane: f32) -> Pixels { + bounds.origin.x + LEFT_PADDING + lane * LANE_WIDTH + LANE_WIDTH / 2.0 } fn to_row_center( @@ -901,9 +903,7 @@ pub struct GitGraph { context_menu: Option<(Entity, Point, Subscription)>, row_height: Pixels, table_interaction_state: Entity, - table_column_widths: Entity, - horizontal_scroll_offset: Pixels, - graph_viewport_width: Pixels, + column_widths: Entity, selected_entry_idx: Option, hovered_entry_idx: Option, graph_canvas_bounds: Rc>>>, @@ -933,8 +933,52 @@ impl GitGraph { font_size + px(12.0) } - fn graph_content_width(&self) -> Pixels { - (LANE_WIDTH * self.graph_data.max_lanes.min(8) as f32) + LEFT_PADDING * 2.0 + fn graph_canvas_content_width(&self) -> Pixels { + (LANE_WIDTH * self.graph_data.max_lanes.max(6) as f32) + LEFT_PADDING * 2.0 + } + + fn preview_column_fractions(&self, window: &Window, cx: &App) -> [f32; 5] { + let fractions = self + .column_widths + .read(cx) + .preview_fractions(window.rem_size()); + [ + fractions[0], + fractions[1], + fractions[2], + fractions[3], + fractions[4], + ] + } + + fn table_column_width_config(&self, window: &Window, cx: &App) -> ColumnWidthConfig { + let [_, description, date, author, commit] = self.preview_column_fractions(window, cx); + let table_total = description + date + author + commit; + + let widths = if 
table_total > 0.0 { + vec![ + DefiniteLength::Fraction(description / table_total), + DefiniteLength::Fraction(date / table_total), + DefiniteLength::Fraction(author / table_total), + DefiniteLength::Fraction(commit / table_total), + ] + } else { + vec![ + DefiniteLength::Fraction(0.25), + DefiniteLength::Fraction(0.25), + DefiniteLength::Fraction(0.25), + DefiniteLength::Fraction(0.25), + ] + }; + + ColumnWidthConfig::explicit(widths) + } + + fn graph_viewport_width(&self, window: &Window, cx: &App) -> Pixels { + self.column_widths + .read(cx) + .preview_column_width(0, window) + .unwrap_or_else(|| self.graph_canvas_content_width()) } pub fn new( @@ -972,20 +1016,22 @@ impl GitGraph { }); let table_interaction_state = cx.new(|cx| TableInteractionState::new(cx)); - let table_column_widths = cx.new(|_cx| { + let column_widths = cx.new(|_cx| { RedistributableColumnsState::new( - 4, + 5, vec![ - DefiniteLength::Fraction(0.72), - DefiniteLength::Fraction(0.12), - DefiniteLength::Fraction(0.10), - DefiniteLength::Fraction(0.06), + DefiniteLength::Fraction(0.14), + DefiniteLength::Fraction(0.6192), + DefiniteLength::Fraction(0.1032), + DefiniteLength::Fraction(0.086), + DefiniteLength::Fraction(0.0516), ], vec![ TableResizeBehavior::Resizable, TableResizeBehavior::Resizable, TableResizeBehavior::Resizable, TableResizeBehavior::Resizable, + TableResizeBehavior::Resizable, ], ) }); @@ -1020,9 +1066,7 @@ impl GitGraph { context_menu: None, row_height, table_interaction_state, - table_column_widths, - horizontal_scroll_offset: px(0.), - graph_viewport_width: px(88.), + column_widths, selected_entry_idx: None, hovered_entry_idx: None, graph_canvas_bounds: Rc::new(Cell::new(None)), @@ -1104,7 +1148,7 @@ impl GitGraph { } } } - RepositoryEvent::BranchChanged => { + RepositoryEvent::HeadChanged | RepositoryEvent::BranchListChanged => { self.pending_select_sha = None; // Only invalidate if we scanned atleast once, // meaning we are not inside the initial repo loading state @@ 
-1113,6 +1157,12 @@ impl GitGraph { self.invalidate_state(cx); } } + RepositoryEvent::StashEntriesChanged if self.log_source == LogSource::All => { + self.pending_select_sha = None; + if repository.read(cx).scan_id > 1 { + self.invalidate_state(cx); + } + } RepositoryEvent::GraphEvent(_, _) => {} _ => {} } @@ -2087,10 +2137,13 @@ impl GitGraph { let first_visible_row = (scroll_offset_y / row_height).floor() as usize; let vertical_scroll_offset = scroll_offset_y - (first_visible_row as f32 * row_height); - let horizontal_scroll_offset = self.horizontal_scroll_offset; - let max_lanes = self.graph_data.max_lanes.max(6); - let graph_width = LANE_WIDTH * max_lanes as f32 + LEFT_PADDING * 2.0; + let graph_viewport_width = self.graph_viewport_width(window, cx); + let graph_width = if self.graph_canvas_content_width() > graph_viewport_width { + self.graph_canvas_content_width() + } else { + graph_viewport_width + }; let last_visible_row = first_visible_row + (viewport_height / row_height).ceil() as usize + 1; @@ -2158,8 +2211,7 @@ impl GitGraph { bounds.origin.y + row_idx as f32 * row_height + row_height / 2.0 - vertical_scroll_offset; - let commit_x = - lane_center_x(bounds, row.lane as f32, horizontal_scroll_offset); + let commit_x = lane_center_x(bounds, row.lane as f32); draw_commit_circle(commit_x, row_y_center, row_color, window); } @@ -2171,8 +2223,7 @@ impl GitGraph { continue; }; - let line_x = - lane_center_x(bounds, start_column as f32, horizontal_scroll_offset); + let line_x = lane_center_x(bounds, start_column as f32); let start_row = line.full_interval.start as i32 - first_visible_row as i32; @@ -2188,6 +2239,8 @@ impl GitGraph { builder.move_to(point(line_x, from_y)); let segments = &line.segments[start_segment_idx..]; + let desired_curve_height = row_height / 3.0; + let desired_curve_width = LANE_WIDTH / 3.0; for (segment_idx, segment) in segments.iter().enumerate() { let is_last = segment_idx + 1 == segments.len(); @@ -2215,11 +2268,7 @@ impl GitGraph { 
on_row, curve_kind, } => { - let mut to_column = lane_center_x( - bounds, - *to_column as f32, - horizontal_scroll_offset, - ); + let mut to_column = lane_center_x(bounds, *to_column as f32); let mut to_row = to_row_center( *on_row - first_visible_row, @@ -2241,66 +2290,69 @@ impl GitGraph { if is_last { to_column -= column_shift; } - builder.move_to(point(current_column, current_row)); - if (to_column - current_column).abs() > LANE_WIDTH { - // Multi-lane checkout: straight down, small - // curve turn, then straight horizontal. - if (to_row - current_row).abs() > row_height { - let vertical_end = - point(current_column, to_row - row_height); - builder.line_to(vertical_end); - builder.move_to(vertical_end); - } - - let lane_shift = if going_right { - LANE_WIDTH - } else { - -LANE_WIDTH - }; - let curve_end = - point(current_column + lane_shift, to_row); - let curve_control = point(current_column, to_row); - builder.curve_to(curve_end, curve_control); - builder.move_to(curve_end); - - builder.line_to(point(to_column, to_row)); + let available_curve_width = + (to_column - current_column).abs(); + let available_curve_height = + (to_row - current_row).abs(); + let curve_width = + desired_curve_width.min(available_curve_width); + let curve_height = + desired_curve_height.min(available_curve_height); + let signed_curve_width = if going_right { + curve_width } else { - if (to_row - current_row).abs() > row_height { - let start_curve = - point(current_column, to_row - row_height); - builder.line_to(start_curve); - builder.move_to(start_curve); - } - let control = point(current_column, to_row); - builder.curve_to(point(to_column, to_row), control); - } + -curve_width + }; + let curve_start = + point(current_column, to_row - curve_height); + let curve_end = + point(current_column + signed_curve_width, to_row); + let curve_control = point(current_column, to_row); + + builder.move_to(point(current_column, current_row)); + builder.line_to(curve_start); + 
builder.move_to(curve_start); + builder.curve_to(curve_end, curve_control); + builder.move_to(curve_end); + builder.line_to(point(to_column, to_row)); } CurveKind::Merge => { if is_last { to_row -= COMMIT_CIRCLE_RADIUS; } - builder.move_to(point( + + let merge_start = point( current_column + column_shift, current_row - COMMIT_CIRCLE_RADIUS, - )); - - if (to_column - current_column).abs() > LANE_WIDTH { - let column_shift = if going_right { - LANE_WIDTH - } else { - -LANE_WIDTH - }; - let start_curve = point( - current_column + column_shift, - current_row - COMMIT_CIRCLE_RADIUS, - ); - builder.line_to(start_curve); - builder.move_to(start_curve); - } - - let control = point(to_column, current_row); - builder.curve_to(point(to_column, to_row), control); + ); + let available_curve_width = + (to_column - merge_start.x).abs(); + let available_curve_height = + (to_row - merge_start.y).abs(); + let curve_width = + desired_curve_width.min(available_curve_width); + let curve_height = + desired_curve_height.min(available_curve_height); + let signed_curve_width = if going_right { + curve_width + } else { + -curve_width + }; + let curve_start = point( + to_column - signed_curve_width, + merge_start.y, + ); + let curve_end = + point(to_column, merge_start.y + curve_height); + let curve_control = point(to_column, merge_start.y); + + builder.move_to(merge_start); + builder.line_to(curve_start); + builder.move_to(curve_start); + builder.curve_to(curve_end, curve_control); + builder.move_to(curve_end); + builder.line_to(point(to_column, to_row)); } } current_row = to_row; @@ -2342,9 +2394,8 @@ impl GitGraph { let local_y = position_y - canvas_bounds.origin.y; if local_y >= px(0.) 
&& local_y < canvas_bounds.size.height { - let row_in_viewport = (local_y / self.row_height).floor() as usize; - let scroll_rows = (scroll_offset_y / self.row_height).floor() as usize; - let absolute_row = scroll_rows + row_in_viewport; + let absolute_y = local_y + scroll_offset_y; + let absolute_row = (absolute_y / self.row_height).floor() as usize; if absolute_row < self.graph_data.commits.len() { return Some(absolute_row); @@ -2409,25 +2460,8 @@ impl GitGraph { let new_y = (current_offset.y + delta.y).clamp(max_vertical_scroll, px(0.)); let new_offset = Point::new(current_offset.x, new_y); - let max_lanes = self.graph_data.max_lanes.max(1); - let graph_content_width = LANE_WIDTH * max_lanes as f32 + LEFT_PADDING * 2.0; - let max_horizontal_scroll = (graph_content_width - self.graph_viewport_width).max(px(0.)); - - let new_horizontal_offset = - (self.horizontal_scroll_offset - delta.x).clamp(px(0.), max_horizontal_scroll); - - let vertical_changed = new_offset != current_offset; - let horizontal_changed = new_horizontal_offset != self.horizontal_scroll_offset; - - if vertical_changed { + if new_offset != current_offset { table_state.set_scroll_offset(new_offset); - } - - if horizontal_changed { - self.horizontal_scroll_offset = new_horizontal_offset; - } - - if vertical_changed || horizontal_changed { cx.notify(); } } @@ -2522,118 +2556,200 @@ impl Render for GitGraph { this.child(self.render_loading_spinner(cx)) }) } else { - div() + let header_resize_info = HeaderResizeInfo::from_state(&self.column_widths, cx); + let header_context = TableRenderContext::for_column_widths( + Some(self.column_widths.read(cx).widths_to_render()), + true, + ); + let [ + graph_fraction, + description_fraction, + date_fraction, + author_fraction, + commit_fraction, + ] = self.preview_column_fractions(window, cx); + let table_fraction = + description_fraction + date_fraction + author_fraction + commit_fraction; + let table_width_config = self.table_column_width_config(window, cx); + + 
h_flex() .size_full() - .flex() - .flex_row() .child( div() - .w(self.graph_content_width()) - .h_full() + .flex_1() + .min_w_0() + .size_full() .flex() .flex_col() - .child( - div() - .flex() - .items_center() - .px_1() - .py_0p5() - .border_b_1() - .whitespace_nowrap() - .border_color(cx.theme().colors().border) - .child(Label::new("Graph").color(Color::Muted)), - ) - .child( - div() - .id("graph-canvas") - .flex_1() - .overflow_hidden() - .child(self.render_graph(window, cx)) - .on_scroll_wheel(cx.listener(Self::handle_graph_scroll)) - .on_mouse_move(cx.listener(Self::handle_graph_mouse_move)) - .on_click(cx.listener(Self::handle_graph_click)) - .on_hover(cx.listener(|this, &is_hovered: &bool, _, cx| { - if !is_hovered && this.hovered_entry_idx.is_some() { - this.hovered_entry_idx = None; - cx.notify(); - } - })), - ), - ) - .child({ - let row_height = self.row_height; - let selected_entry_idx = self.selected_entry_idx; - let hovered_entry_idx = self.hovered_entry_idx; - let weak_self = cx.weak_entity(); - let focus_handle = self.focus_handle.clone(); - div().flex_1().size_full().child( - Table::new(4) - .interactable(&self.table_interaction_state) - .hide_row_borders() - .hide_row_hover() - .header(vec![ - Label::new("Description") - .color(Color::Muted) - .into_any_element(), - Label::new("Date").color(Color::Muted).into_any_element(), - Label::new("Author").color(Color::Muted).into_any_element(), - Label::new("Commit").color(Color::Muted).into_any_element(), - ]) - .width_config(ColumnWidthConfig::redistributable( - self.table_column_widths.clone(), - )) - .map_row(move |(index, row), window, cx| { - let is_selected = selected_entry_idx == Some(index); - let is_hovered = hovered_entry_idx == Some(index); - let is_focused = focus_handle.is_focused(window); - let weak = weak_self.clone(); - let weak_for_hover = weak.clone(); - - let hover_bg = cx.theme().colors().element_hover.opacity(0.6); - let selected_bg = if is_focused { - 
cx.theme().colors().element_selected - } else { - cx.theme().colors().element_hover - }; - - row.h(row_height) - .when(is_selected, |row| row.bg(selected_bg)) - .when(is_hovered && !is_selected, |row| row.bg(hover_bg)) - .on_hover(move |&is_hovered, _, cx| { - weak_for_hover - .update(cx, |this, cx| { - if is_hovered { - if this.hovered_entry_idx != Some(index) { - this.hovered_entry_idx = Some(index); - cx.notify(); - } - } else if this.hovered_entry_idx == Some(index) { - // Only clear if this row was the hovered one - this.hovered_entry_idx = None; - cx.notify(); - } - }) - .ok(); - }) - .on_click(move |event, window, cx| { - let click_count = event.click_count(); - weak.update(cx, |this, cx| { - this.select_entry(index, ScrollStrategy::Center, cx); - if click_count >= 2 { - this.open_commit_view(index, window, cx); - } - }) - .ok(); - }) - .into_any_element() - }) - .uniform_list( - "git-graph-commits", - commit_count, - cx.processor(Self::render_table_rows), + .child(render_table_header( + TableRow::from_vec( + vec![ + Label::new("Graph") + .color(Color::Muted) + .truncate() + .into_any_element(), + Label::new("Description") + .color(Color::Muted) + .into_any_element(), + Label::new("Date").color(Color::Muted).into_any_element(), + Label::new("Author").color(Color::Muted).into_any_element(), + Label::new("Commit").color(Color::Muted).into_any_element(), + ], + 5, ), - ) - }) + header_context, + Some(header_resize_info), + Some(self.column_widths.entity_id()), + cx, + )) + .child({ + let row_height = self.row_height; + let selected_entry_idx = self.selected_entry_idx; + let hovered_entry_idx = self.hovered_entry_idx; + let weak_self = cx.weak_entity(); + let focus_handle = self.focus_handle.clone(); + + bind_redistributable_columns( + div() + .relative() + .flex_1() + .w_full() + .overflow_hidden() + .child( + h_flex() + .size_full() + .child( + div() + .w(DefiniteLength::Fraction(graph_fraction)) + .h_full() + .min_w_0() + .overflow_hidden() + .child( + div() 
+ .id("graph-canvas") + .size_full() + .overflow_hidden() + .child( + div() + .size_full() + .child(self.render_graph(window, cx)), + ) + .on_scroll_wheel( + cx.listener(Self::handle_graph_scroll), + ) + .on_mouse_move( + cx.listener(Self::handle_graph_mouse_move), + ) + .on_click(cx.listener(Self::handle_graph_click)) + .on_hover(cx.listener( + |this, &is_hovered: &bool, _, cx| { + if !is_hovered + && this.hovered_entry_idx.is_some() + { + this.hovered_entry_idx = None; + cx.notify(); + } + }, + )), + ), + ) + .child( + div() + .w(DefiniteLength::Fraction(table_fraction)) + .h_full() + .min_w_0() + .child( + Table::new(4) + .interactable(&self.table_interaction_state) + .hide_row_borders() + .hide_row_hover() + .width_config(table_width_config) + .map_row(move |(index, row), window, cx| { + let is_selected = + selected_entry_idx == Some(index); + let is_hovered = + hovered_entry_idx == Some(index); + let is_focused = + focus_handle.is_focused(window); + let weak = weak_self.clone(); + let weak_for_hover = weak.clone(); + + let hover_bg = cx + .theme() + .colors() + .element_hover + .opacity(0.6); + let selected_bg = if is_focused { + cx.theme().colors().element_selected + } else { + cx.theme().colors().element_hover + }; + + row.h(row_height) + .when(is_selected, |row| row.bg(selected_bg)) + .when( + is_hovered && !is_selected, + |row| row.bg(hover_bg), + ) + .on_hover(move |&is_hovered, _, cx| { + weak_for_hover + .update(cx, |this, cx| { + if is_hovered { + if this.hovered_entry_idx + != Some(index) + { + this.hovered_entry_idx = + Some(index); + cx.notify(); + } + } else if this + .hovered_entry_idx + == Some(index) + { + this.hovered_entry_idx = + None; + cx.notify(); + } + }) + .ok(); + }) + .on_click(move |event, window, cx| { + let click_count = event.click_count(); + weak.update(cx, |this, cx| { + this.select_entry( + index, + ScrollStrategy::Center, + cx, + ); + if click_count >= 2 { + this.open_commit_view( + index, + window, + cx, + ); + } + }) + .ok(); 
+ }) + .into_any_element() + }) + .uniform_list( + "git-graph-commits", + commit_count, + cx.processor(Self::render_table_rows), + ), + ), + ), + ) + .child(render_redistributable_columns_resize_handles( + &self.column_widths, + window, + cx, + )), + self.column_widths.clone(), + ) + }), + ) .on_drag_move::(cx.listener(|this, event, window, cx| { this.commit_details_split_state.update(cx, |state, cx| { state.on_drag_move(event, window, cx); @@ -2659,6 +2775,11 @@ impl Render for GitGraph { this.open_selected_commit_view(window, cx); })) .on_action(cx.listener(Self::cancel)) + .on_action(cx.listener(|this, _: &FocusSearch, window, cx| { + this.search_state + .editor + .update(cx, |editor, cx| editor.focus_handle(cx).focus(window, cx)); + })) .on_action(cx.listener(Self::select_first)) .on_action(cx.listener(Self::select_prev)) .on_action(cx.listener(Self::select_next)) @@ -2690,6 +2811,10 @@ impl Render for GitGraph { ) .with_priority(1) })) + .on_action(cx.listener(|_, _: &buffer_search::Deploy, window, cx| { + window.dispatch_action(Box::new(FocusSearch), cx); + cx.stop_propagation(); + })) } } @@ -3617,8 +3742,8 @@ mod tests { assert!( observed_repository_events .iter() - .any(|event| matches!(event, RepositoryEvent::BranchChanged)), - "initial repository scan should emit BranchChanged" + .any(|event| matches!(event, RepositoryEvent::HeadChanged)), + "initial repository scan should emit HeadChanged" ); let commit_count_after = repository.read_with(cx, |repo, _| { repo.get_graph_data(crate::LogSource::default(), crate::LogOrder::default()) @@ -3729,16 +3854,227 @@ mod tests { }); cx.run_until_parked(); - git_graph.update_in(&mut *cx, |this, window, cx| { - this.render(window, cx); - }); + cx.draw( + point(px(0.), px(0.)), + gpui::size(px(1200.), px(800.)), + |_, _| git_graph.clone().into_any_element(), + ); cx.run_until_parked(); - let commit_count_after_switch_back = + // Verify graph data is reloaded from repository cache on switch back + let 
reloaded_commit_count = git_graph.read_with(&*cx, |graph, _| graph.graph_data.commits.len()); assert_eq!( - initial_commit_count, commit_count_after_switch_back, - "graph_data should be repopulated from cache after switching back to the same repo" + reloaded_commit_count, + commits.len(), + "graph data should be reloaded after switching back" + ); + } + + #[gpui::test] + async fn test_graph_data_reloaded_after_stash_change(cx: &mut TestAppContext) { + init_test(cx); + + let fs = FakeFs::new(cx.executor()); + fs.insert_tree( + Path::new("/project"), + json!({ + ".git": {}, + "file.txt": "content", + }), + ) + .await; + + let initial_head = Oid::from_bytes(&[1; 20]).unwrap(); + let initial_stash = Oid::from_bytes(&[2; 20]).unwrap(); + let updated_head = Oid::from_bytes(&[3; 20]).unwrap(); + let updated_stash = Oid::from_bytes(&[4; 20]).unwrap(); + + fs.set_graph_commits( + Path::new("/project/.git"), + vec![ + Arc::new(InitialGraphCommitData { + sha: initial_head, + parents: smallvec![initial_stash], + ref_names: vec!["HEAD".into(), "refs/heads/main".into()], + }), + Arc::new(InitialGraphCommitData { + sha: initial_stash, + parents: smallvec![], + ref_names: vec!["refs/stash".into()], + }), + ], + ); + fs.with_git_state(Path::new("/project/.git"), true, |state| { + state.stash_entries = git::stash::GitStash { + entries: vec![git::stash::StashEntry { + index: 0, + oid: initial_stash, + message: "initial stash".to_string(), + branch: Some("main".to_string()), + timestamp: 1, + }] + .into(), + }; + }) + .unwrap(); + + let project = Project::test(fs.clone(), [Path::new("/project")], cx).await; + cx.run_until_parked(); + + let repository = project.read_with(cx, |project, cx| { + project + .active_repository(cx) + .expect("should have a repository") + }); + + let (multi_workspace, cx) = cx.add_window_view(|window, cx| { + workspace::MultiWorkspace::test_new(project.clone(), window, cx) + }); + let workspace_weak = + multi_workspace.read_with(&*cx, |multi, _| 
multi.workspace().downgrade()); + let git_graph = cx.new_window_entity(|window, cx| { + GitGraph::new( + repository.read(cx).id, + project.read(cx).git_store().clone(), + workspace_weak, + window, + cx, + ) + }); + cx.run_until_parked(); + + let initial_shas = git_graph.read_with(&*cx, |graph, _| { + graph + .graph_data + .commits + .iter() + .map(|commit| commit.data.sha) + .collect::>() + }); + assert_eq!(initial_shas, vec![initial_head, initial_stash]); + + fs.set_graph_commits( + Path::new("/project/.git"), + vec![ + Arc::new(InitialGraphCommitData { + sha: updated_head, + parents: smallvec![updated_stash], + ref_names: vec!["HEAD".into(), "refs/heads/main".into()], + }), + Arc::new(InitialGraphCommitData { + sha: updated_stash, + parents: smallvec![], + ref_names: vec!["refs/stash".into()], + }), + ], + ); + fs.with_git_state(Path::new("/project/.git"), true, |state| { + state.stash_entries = git::stash::GitStash { + entries: vec![git::stash::StashEntry { + index: 0, + oid: updated_stash, + message: "updated stash".to_string(), + branch: Some("main".to_string()), + timestamp: 1, + }] + .into(), + }; + }) + .unwrap(); + + project + .update(cx, |project, cx| project.git_scans_complete(cx)) + .await; + cx.run_until_parked(); + + cx.draw( + point(px(0.), px(0.)), + gpui::size(px(1200.), px(800.)), + |_, _| git_graph.clone().into_any_element(), ); + cx.run_until_parked(); + + let reloaded_shas = git_graph.read_with(&*cx, |graph, _| { + graph + .graph_data + .commits + .iter() + .map(|commit| commit.data.sha) + .collect::>() + }); + assert_eq!(reloaded_shas, vec![updated_head, updated_stash]); + } + + #[gpui::test] + async fn test_git_graph_row_at_position_rounding(cx: &mut TestAppContext) { + init_test(cx); + + let fs = FakeFs::new(cx.executor()); + fs.insert_tree( + Path::new("/project"), + serde_json::json!({ + ".git": {}, + "file.txt": "content", + }), + ) + .await; + + let mut rng = StdRng::seed_from_u64(42); + let commits = generate_random_commit_dag(&mut rng, 
10, false); + fs.set_graph_commits(Path::new("/project/.git"), commits.clone()); + + let project = Project::test(fs.clone(), [Path::new("/project")], cx).await; + cx.run_until_parked(); + + let repository = project.read_with(cx, |project, cx| { + project + .active_repository(cx) + .expect("should have a repository") + }); + + let (multi_workspace, cx) = cx.add_window_view(|window, cx| { + workspace::MultiWorkspace::test_new(project.clone(), window, cx) + }); + + let workspace_weak = + multi_workspace.read_with(&*cx, |multi, _| multi.workspace().downgrade()); + + let git_graph = cx.new_window_entity(|window, cx| { + GitGraph::new( + repository.read(cx).id, + project.read(cx).git_store().clone(), + workspace_weak, + window, + cx, + ) + }); + cx.run_until_parked(); + + git_graph.update(cx, |graph, cx| { + assert!( + graph.graph_data.commits.len() >= 10, + "graph should load dummy commits" + ); + + graph.row_height = px(20.0); + let origin_y = px(100.0); + graph.graph_canvas_bounds.set(Some(Bounds { + origin: point(px(0.0), origin_y), + size: gpui::size(px(100.0), px(1000.0)), + })); + + graph.table_interaction_state.update(cx, |state, _| { + state.set_scroll_offset(point(px(0.0), px(-15.0))) + }); + let pos_y = origin_y + px(10.0); + let absolute_calc_row = graph.row_at_position(pos_y, cx); + + assert_eq!( + absolute_calc_row, + Some(1), + "Row calculation should yield absolute row exactly" + ); + }); } } diff --git a/crates/git_ui/Cargo.toml b/crates/git_ui/Cargo.toml index d95e25fbc7821d42fac4386b522c4effb9462715..e06d16708697f721d9377365223dc444ba7b08ae 100644 --- a/crates/git_ui/Cargo.toml +++ b/crates/git_ui/Cargo.toml @@ -27,7 +27,6 @@ db.workspace = true editor.workspace = true file_icons.workspace = true futures.workspace = true -feature_flags.workspace = true fuzzy.workspace = true git.workspace = true gpui.workspace = true diff --git a/crates/git_ui/src/commit_modal.rs b/crates/git_ui/src/commit_modal.rs index 
432da803e6eedfec304836198f6111f5418084cc..2088ad77ec5d7e71bdfb42ebcbfab6d001f64375 100644 --- a/crates/git_ui/src/commit_modal.rs +++ b/crates/git_ui/src/commit_modal.rs @@ -453,6 +453,7 @@ impl CommitModal { CommitOptions { amend: is_amend_pending, signoff: is_signoff_enabled, + allow_empty: false, }, window, cx, diff --git a/crates/git_ui/src/commit_view.rs b/crates/git_ui/src/commit_view.rs index 757ec1e0ebb92431e110e20f0833e2fcd0a88177..aac44c7f9c6eaf6f18c72bea390c0a0b7ad1a4bd 100644 --- a/crates/git_ui/src/commit_view.rs +++ b/crates/git_ui/src/commit_view.rs @@ -3,7 +3,6 @@ use buffer_diff::BufferDiff; use collections::HashMap; use editor::display_map::{BlockPlacement, BlockProperties, BlockStyle}; use editor::{Addon, Editor, EditorEvent, ExcerptRange, MultiBuffer, multibuffer_context_lines}; -use feature_flags::{FeatureFlagAppExt as _, GitGraphFeatureFlag}; use git::repository::{CommitDetails, CommitDiff, RepoPath, is_binary_content}; use git::status::{FileStatus, StatusCode, TrackedStatus}; use git::{ @@ -212,7 +211,7 @@ impl CommitView { editor.insert_blocks( [BlockProperties { - placement: BlockPlacement::Above(editor::Anchor::min()), + placement: BlockPlacement::Above(editor::Anchor::Min), height: Some(1), style: BlockStyle::Sticky, render: Arc::new(|_| gpui::Empty.into_any_element()), @@ -223,7 +222,10 @@ impl CommitView { editor .buffer() .read(cx) - .buffer_anchor_to_anchor(&message_buffer, Anchor::MAX, cx) + .snapshot(cx) + .anchor_in_buffer(Anchor::max_for_buffer( + message_buffer.read(cx).remote_id(), + )) .map(|anchor| BlockProperties { placement: BlockPlacement::Below(anchor), height: Some(1), @@ -1042,21 +1044,19 @@ impl Render for CommitViewToolbar { }), ) .when(!is_stash, |this| { - this.when(cx.has_flag::(), |this| { - this.child( - IconButton::new("show-in-git-graph", IconName::GitGraph) - .icon_size(IconSize::Small) - .tooltip(Tooltip::text("Show in Git Graph")) - .on_click(move |_, window, cx| { - window.dispatch_action( - 
Box::new(crate::git_panel::OpenAtCommit { - sha: sha_for_graph.clone(), - }), - cx, - ); - }), - ) - }) + this.child( + IconButton::new("show-in-git-graph", IconName::GitGraph) + .icon_size(IconSize::Small) + .tooltip(Tooltip::text("Show in Git Graph")) + .on_click(move |_, window, cx| { + window.dispatch_action( + Box::new(crate::git_panel::OpenAtCommit { + sha: sha_for_graph.clone(), + }), + cx, + ); + }), + ) .children(remote_info.map(|(provider_name, url)| { let icon = match provider_name.as_str() { "GitHub" => IconName::Github, diff --git a/crates/git_ui/src/conflict_view.rs b/crates/git_ui/src/conflict_view.rs index c61214123dff8cbd414c89b586f1176f7255266e..25175dce48163778615c26a585cd8a6319c1735f 100644 --- a/crates/git_ui/src/conflict_view.rs +++ b/crates/git_ui/src/conflict_view.rs @@ -2,23 +2,23 @@ use agent_settings::AgentSettings; use collections::{HashMap, HashSet}; use editor::{ ConflictsOurs, ConflictsOursMarker, ConflictsOuter, ConflictsTheirs, ConflictsTheirsMarker, - Editor, EditorEvent, ExcerptId, MultiBuffer, RowHighlightOptions, + Editor, EditorEvent, MultiBuffer, RowHighlightOptions, display_map::{BlockContext, BlockPlacement, BlockProperties, BlockStyle, CustomBlockId}, }; use gpui::{ - App, Context, DismissEvent, Entity, InteractiveElement as _, ParentElement as _, Subscription, - Task, WeakEntity, + App, ClickEvent, Context, Empty, Entity, InteractiveElement as _, ParentElement as _, + Subscription, Task, WeakEntity, }; use language::{Anchor, Buffer, BufferId}; use project::{ ConflictRegion, ConflictSet, ConflictSetUpdate, Project, ProjectItem as _, - git_store::{GitStoreEvent, RepositoryEvent}, + git_store::{GitStore, GitStoreEvent, RepositoryEvent}, }; use settings::Settings; -use std::{cell::RefCell, ops::Range, rc::Rc, sync::Arc}; -use ui::{ActiveTheme, Divider, Element as _, Styled, Window, prelude::*}; +use std::{ops::Range, sync::Arc}; +use ui::{ButtonLike, Divider, Tooltip, prelude::*}; use util::{ResultExt as _, debug_panic, 
maybe}; -use workspace::{Workspace, notifications::simple_message_notification::MessageNotification}; +use workspace::{StatusItemView, Workspace, item::ItemHandle}; use zed_actions::agent::{ ConflictContent, ResolveConflictedFilesWithAgent, ResolveConflictsWithAgent, }; @@ -67,62 +67,22 @@ pub fn register_editor(editor: &mut Editor, buffer: Entity, cx: &mu let buffers = buffer.read(cx).all_buffers(); for buffer in buffers { - buffer_added(editor, buffer, cx); + buffer_ranges_updated(editor, buffer, cx); } cx.subscribe(&cx.entity(), |editor, _, event, cx| match event { - EditorEvent::ExcerptsAdded { buffer, .. } => buffer_added(editor, buffer.clone(), cx), - EditorEvent::ExcerptsExpanded { ids } => { - let multibuffer = editor.buffer().read(cx).snapshot(cx); - for excerpt_id in ids { - let Some(buffer) = multibuffer.buffer_for_excerpt(*excerpt_id) else { - continue; - }; - let addon = editor.addon::().unwrap(); - let Some(conflict_set) = addon.conflict_set(buffer.remote_id()).clone() else { - return; - }; - excerpt_for_buffer_updated(editor, conflict_set, cx); - } + EditorEvent::BufferRangesUpdated { buffer, .. } => { + buffer_ranges_updated(editor, buffer.clone(), cx) + } + EditorEvent::BuffersRemoved { removed_buffer_ids } => { + buffers_removed(editor, removed_buffer_ids, cx) } - EditorEvent::ExcerptsRemoved { - removed_buffer_ids, .. 
- } => buffers_removed(editor, removed_buffer_ids, cx), _ => {} }) .detach(); } -fn excerpt_for_buffer_updated( - editor: &mut Editor, - conflict_set: Entity, - cx: &mut Context, -) { - let conflicts_len = conflict_set.read(cx).snapshot().conflicts.len(); - let buffer_id = conflict_set.read(cx).snapshot().buffer_id; - let Some(buffer_conflicts) = editor - .addon_mut::() - .unwrap() - .buffers - .get(&buffer_id) - else { - return; - }; - let addon_conflicts_len = buffer_conflicts.block_ids.len(); - conflicts_updated( - editor, - conflict_set, - &ConflictSetUpdate { - buffer_range: None, - old_range: 0..addon_conflicts_len, - new_range: 0..conflicts_len, - }, - cx, - ); -} - -#[ztracing::instrument(skip_all)] -fn buffer_added(editor: &mut Editor, buffer: Entity, cx: &mut Context) { +fn buffer_ranges_updated(editor: &mut Editor, buffer: Entity, cx: &mut Context) { let Some(project) = editor.project() else { return; }; @@ -188,14 +148,6 @@ fn conflicts_updated( let conflict_set = conflict_set.read(cx).snapshot(); let multibuffer = editor.buffer().read(cx); let snapshot = multibuffer.snapshot(cx); - let excerpts = multibuffer.excerpts_for_buffer(buffer_id, cx); - let Some(buffer_snapshot) = excerpts - .first() - .and_then(|(excerpt_id, _, _)| snapshot.buffer_for_excerpt(*excerpt_id)) - else { - return; - }; - let old_range = maybe!({ let conflict_addon = editor.addon_mut::().unwrap(); let buffer_conflicts = conflict_addon.buffers.get(&buffer_id)?; @@ -230,23 +182,7 @@ fn conflicts_updated( let mut removed_highlighted_ranges = Vec::new(); let mut removed_block_ids = HashSet::default(); for (conflict_range, block_id) in old_conflicts { - let Some((excerpt_id, _, _)) = excerpts.iter().find(|(_, _, range)| { - let precedes_start = range - .context - .start - .cmp(&conflict_range.start, buffer_snapshot) - .is_le(); - let follows_end = range - .context - .end - .cmp(&conflict_range.start, buffer_snapshot) - .is_ge(); - precedes_start && follows_end - }) else { - continue; - 
}; - let excerpt_id = *excerpt_id; - let Some(range) = snapshot.anchor_range_in_excerpt(excerpt_id, conflict_range) else { + let Some(range) = snapshot.buffer_anchor_range_to_anchor_range(conflict_range) else { continue; }; removed_highlighted_ranges.push(range.clone()); @@ -272,26 +208,9 @@ fn conflicts_updated( let new_conflicts = &conflict_set.conflicts[event.new_range.clone()]; let mut blocks = Vec::new(); for conflict in new_conflicts { - let Some((excerpt_id, _, _)) = excerpts.iter().find(|(_, _, range)| { - let precedes_start = range - .context - .start - .cmp(&conflict.range.start, buffer_snapshot) - .is_le(); - let follows_end = range - .context - .end - .cmp(&conflict.range.start, buffer_snapshot) - .is_ge(); - precedes_start && follows_end - }) else { - continue; - }; - let excerpt_id = *excerpt_id; + update_conflict_highlighting(editor, conflict, &snapshot, cx); - update_conflict_highlighting(editor, conflict, &snapshot, excerpt_id, cx); - - let Some(anchor) = snapshot.anchor_in_excerpt(excerpt_id, conflict.range.start) else { + let Some(anchor) = snapshot.anchor_in_excerpt(conflict.range.start) else { continue; }; @@ -302,7 +221,7 @@ fn conflicts_updated( style: BlockStyle::Sticky, render: Arc::new({ let conflict = conflict.clone(); - move |cx| render_conflict_buttons(&conflict, excerpt_id, editor_handle.clone(), cx) + move |cx| render_conflict_buttons(&conflict, editor_handle.clone(), cx) }), priority: 0, }) @@ -328,14 +247,13 @@ fn update_conflict_highlighting( editor: &mut Editor, conflict: &ConflictRegion, buffer: &editor::MultiBufferSnapshot, - excerpt_id: editor::ExcerptId, cx: &mut Context, ) -> Option<()> { log::debug!("update conflict highlighting for {conflict:?}"); - let outer = buffer.anchor_range_in_excerpt(excerpt_id, conflict.range.clone())?; - let ours = buffer.anchor_range_in_excerpt(excerpt_id, conflict.ours.clone())?; - let theirs = buffer.anchor_range_in_excerpt(excerpt_id, conflict.theirs.clone())?; + let outer = 
buffer.buffer_anchor_range_to_anchor_range(conflict.range.clone())?; + let ours = buffer.buffer_anchor_range_to_anchor_range(conflict.ours.clone())?; + let theirs = buffer.buffer_anchor_range_to_anchor_range(conflict.theirs.clone())?; let ours_background = cx.theme().colors().version_control_conflict_marker_ours; let theirs_background = cx.theme().colors().version_control_conflict_marker_theirs; @@ -373,7 +291,6 @@ fn update_conflict_highlighting( fn render_conflict_buttons( conflict: &ConflictRegion, - excerpt_id: ExcerptId, editor: WeakEntity, cx: &mut BlockContext, ) -> AnyElement { @@ -395,7 +312,6 @@ fn render_conflict_buttons( move |_, window, cx| { resolve_conflict( editor.clone(), - excerpt_id, conflict.clone(), vec![ours.clone()], window, @@ -415,7 +331,6 @@ fn render_conflict_buttons( move |_, window, cx| { resolve_conflict( editor.clone(), - excerpt_id, conflict.clone(), vec![theirs.clone()], window, @@ -436,7 +351,6 @@ fn render_conflict_buttons( move |_, window, cx| { resolve_conflict( editor.clone(), - excerpt_id, conflict.clone(), vec![ours.clone(), theirs.clone()], window, @@ -461,7 +375,7 @@ fn render_conflict_buttons( let content = editor .update(cx, |editor, cx| { let multibuffer = editor.buffer().read(cx); - let buffer_id = conflict.ours.end.buffer_id?; + let buffer_id = conflict.ours.end.buffer_id; let buffer = multibuffer.buffer(buffer_id)?; let buffer_read = buffer.read(cx); let snapshot = buffer_read.snapshot(); @@ -519,77 +433,8 @@ fn collect_conflicted_file_paths(project: &Project, cx: &App) -> Vec { paths } -pub(crate) fn register_conflict_notification( - workspace: &mut Workspace, - cx: &mut Context, -) { - let git_store = workspace.project().read(cx).git_store().clone(); - - let last_shown_paths: Rc>> = Rc::new(RefCell::new(HashSet::default())); - - cx.subscribe(&git_store, move |workspace, _git_store, event, cx| { - let conflicts_changed = matches!( - event, - GitStoreEvent::ConflictsUpdated - | GitStoreEvent::RepositoryUpdated(_, 
RepositoryEvent::StatusesChanged, _) - ); - if !AgentSettings::get_global(cx).enabled(cx) || !conflicts_changed { - return; - } - let project = workspace.project().read(cx); - if project.is_via_collab() { - return; - } - - if workspace.is_notification_suppressed(workspace::merge_conflict_notification_id()) { - return; - } - - let paths = collect_conflicted_file_paths(project, cx); - let notification_id = workspace::merge_conflict_notification_id(); - let current_paths_set: HashSet = paths.iter().cloned().collect(); - - if paths.is_empty() { - last_shown_paths.borrow_mut().clear(); - workspace.dismiss_notification(¬ification_id, cx); - } else if *last_shown_paths.borrow() != current_paths_set { - // Only show the notification if the set of conflicted paths has changed. - // This prevents re-showing after the user dismisses it while working on the same conflicts. - *last_shown_paths.borrow_mut() = current_paths_set; - let file_count = paths.len(); - workspace.show_notification(notification_id, cx, |cx| { - cx.new(|cx| { - let message = format!( - "{file_count} file{} have unresolved merge conflicts", - if file_count == 1 { "" } else { "s" } - ); - - MessageNotification::new(message, cx) - .primary_message("Resolve with Agent") - .primary_icon(IconName::ZedAssistant) - .primary_icon_color(Color::Muted) - .primary_on_click({ - let paths = paths.clone(); - move |window, cx| { - window.dispatch_action( - Box::new(ResolveConflictedFilesWithAgent { - conflicted_file_paths: paths.clone(), - }), - cx, - ); - cx.emit(DismissEvent); - } - }) - }) - }); - } - }) - .detach(); -} - pub(crate) fn resolve_conflict( editor: WeakEntity, - excerpt_id: ExcerptId, resolved_conflict: ConflictRegion, ranges: Vec>, window: &mut Window, @@ -601,7 +446,7 @@ pub(crate) fn resolve_conflict( let workspace = editor.workspace()?; let project = editor.project()?.clone(); let multibuffer = editor.buffer().clone(); - let buffer_id = resolved_conflict.ours.end.buffer_id?; + let buffer_id = 
resolved_conflict.ours.end.buffer_id; let buffer = multibuffer.read(cx).buffer(buffer_id)?; resolved_conflict.resolve(buffer.clone(), &ranges, cx); let conflict_addon = editor.addon_mut::().unwrap(); @@ -620,7 +465,7 @@ pub(crate) fn resolve_conflict( .ok()?; let &(_, block_id) = &state.block_ids[ix]; let range = - snapshot.anchor_range_in_excerpt(excerpt_id, resolved_conflict.range)?; + snapshot.buffer_anchor_range_to_anchor_range(resolved_conflict.range)?; editor.remove_gutter_highlights::(vec![range.clone()], cx); @@ -660,3 +505,171 @@ pub(crate) fn resolve_conflict( } }) } + +pub struct MergeConflictIndicator { + project: Entity, + conflicted_paths: Vec, + last_shown_paths: HashSet, + dismissed: bool, + _subscription: Subscription, +} + +impl MergeConflictIndicator { + pub fn new(workspace: &Workspace, cx: &mut Context) -> Self { + let project = workspace.project().clone(); + let git_store = project.read(cx).git_store().clone(); + + let subscription = cx.subscribe(&git_store, Self::on_git_store_event); + + let conflicted_paths = collect_conflicted_file_paths(project.read(cx), cx); + let last_shown_paths: HashSet = conflicted_paths.iter().cloned().collect(); + + Self { + project, + conflicted_paths, + last_shown_paths, + dismissed: false, + _subscription: subscription, + } + } + + fn on_git_store_event( + &mut self, + _git_store: Entity, + event: &GitStoreEvent, + cx: &mut Context, + ) { + let conflicts_changed = matches!( + event, + GitStoreEvent::ConflictsUpdated + | GitStoreEvent::RepositoryUpdated(_, RepositoryEvent::StatusesChanged, _) + ); + + let agent_settings = AgentSettings::get_global(cx); + if !agent_settings.enabled(cx) + || !agent_settings.show_merge_conflict_indicator + || !conflicts_changed + { + return; + } + + let project = self.project.read(cx); + if project.is_via_collab() { + return; + } + + let paths = collect_conflicted_file_paths(project, cx); + let current_paths_set: HashSet = paths.iter().cloned().collect(); + + if paths.is_empty() { + 
self.conflicted_paths.clear(); + self.last_shown_paths.clear(); + self.dismissed = false; + cx.notify(); + } else if self.last_shown_paths != current_paths_set { + self.last_shown_paths = current_paths_set; + self.conflicted_paths = paths; + self.dismissed = false; + cx.notify(); + } + } + + fn resolve_with_agent(&mut self, window: &mut Window, cx: &mut Context) { + window.dispatch_action( + Box::new(ResolveConflictedFilesWithAgent { + conflicted_file_paths: self.conflicted_paths.clone(), + }), + cx, + ); + self.dismissed = true; + cx.notify(); + } + + fn dismiss(&mut self, _: &ClickEvent, _window: &mut Window, cx: &mut Context) { + self.dismissed = true; + cx.notify(); + } +} + +impl Render for MergeConflictIndicator { + fn render(&mut self, _window: &mut Window, cx: &mut Context) -> impl IntoElement { + let agent_settings = AgentSettings::get_global(cx); + if !agent_settings.enabled(cx) + || !agent_settings.show_merge_conflict_indicator + || self.conflicted_paths.is_empty() + || self.dismissed + { + return Empty.into_any_element(); + } + + let file_count = self.conflicted_paths.len(); + + let message: SharedString = format!( + "Resolve Merge Conflict{} with Agent", + if file_count == 1 { "" } else { "s" } + ) + .into(); + + let tooltip_label: SharedString = format!( + "Found {} {} across the codebase", + file_count, + if file_count == 1 { + "conflict" + } else { + "conflicts" + } + ) + .into(); + + let border_color = cx.theme().colors().text_accent.opacity(0.2); + + h_flex() + .h(rems_from_px(22.)) + .rounded_sm() + .border_1() + .border_color(border_color) + .child( + ButtonLike::new("update-button") + .child( + h_flex() + .h_full() + .gap_1() + .child( + Icon::new(IconName::GitMergeConflict) + .size(IconSize::Small) + .color(Color::Muted), + ) + .child(Label::new(message).size(LabelSize::Small)), + ) + .tooltip(move |_, cx| { + Tooltip::with_meta( + tooltip_label.clone(), + None, + "Click to Resolve with Agent", + cx, + ) + }) + .on_click(cx.listener(|this, _, 
window, cx| { + this.resolve_with_agent(window, cx); + })), + ) + .child( + div().border_l_1().border_color(border_color).child( + IconButton::new("dismiss-merge-conflicts", IconName::Close) + .icon_size(IconSize::XSmall) + .on_click(cx.listener(Self::dismiss)), + ), + ) + .into_any_element() + } +} + +impl StatusItemView for MergeConflictIndicator { + fn set_active_pane_item( + &mut self, + _: Option<&dyn ItemHandle>, + _window: &mut Window, + _: &mut Context, + ) { + } +} diff --git a/crates/git_ui/src/git_panel.rs b/crates/git_ui/src/git_panel.rs index d8ef930cb2509b0e92b7fe8f90c4cbaf4121132c..0cb8ec6b78929d216b700b6e21cbf43a538c6f56 100644 --- a/crates/git_ui/src/git_panel.rs +++ b/crates/git_ui/src/git_panel.rs @@ -20,7 +20,6 @@ use editor::{ actions::ExpandAllDiffHunks, }; use editor::{EditorStyle, RewrapOptions}; -use feature_flags::{FeatureFlagAppExt as _, GitGraphFeatureFlag}; use file_icons::FileIcons; use futures::StreamExt as _; use git::commit::ParsedCommitMessage; @@ -49,7 +48,7 @@ use language_model::{ LanguageModelRequestMessage, Role, }; use menu; -use multi_buffer::ExcerptInfo; +use multi_buffer::ExcerptBoundaryInfo; use notifications::status_toast::{StatusToast, ToastIcon}; use panel::{PanelHeader, panel_button, panel_filled_button, panel_icon_button}; use project::{ @@ -781,7 +780,7 @@ impl GitPanel { move |this, _git_store, event, window, cx| match event { GitStoreEvent::RepositoryUpdated( _, - RepositoryEvent::StatusesChanged | RepositoryEvent::BranchChanged, + RepositoryEvent::StatusesChanged | RepositoryEvent::HeadChanged, true, ) | GitStoreEvent::RepositoryAdded @@ -2156,6 +2155,7 @@ impl GitPanel { CommitOptions { amend: false, signoff: self.signoff_enabled, + allow_empty: false, }, window, cx, @@ -2196,6 +2196,7 @@ impl GitPanel { CommitOptions { amend: true, signoff: self.signoff_enabled, + allow_empty: false, }, window, cx, @@ -4455,7 +4456,11 @@ impl GitPanel { git_panel .update(cx, |git_panel, cx| { git_panel.commit_changes( - 
CommitOptions { amend, signoff }, + CommitOptions { + amend, + signoff, + allow_empty: false, + }, window, cx, ); @@ -4529,7 +4534,6 @@ impl GitPanel { let commit = branch.most_recent_commit.as_ref()?.clone(); let workspace = self.workspace.clone(); let this = cx.entity(); - let can_open_git_graph = cx.has_flag::(); Some( h_flex() @@ -4607,18 +4611,16 @@ impl GitPanel { ), ) }) - .when(can_open_git_graph, |this| { - this.child( - panel_icon_button("git-graph-button", IconName::GitGraph) - .icon_size(IconSize::Small) - .tooltip(|_window, cx| { - Tooltip::for_action("Open Git Graph", &Open, cx) - }) - .on_click(|_, window, cx| { - window.dispatch_action(Open.boxed_clone(), cx) - }), - ) - }), + .child( + panel_icon_button("git-graph-button", IconName::GitGraph) + .icon_size(IconSize::Small) + .tooltip(|_window, cx| { + Tooltip::for_action("Open Git Graph", &Open, cx) + }) + .on_click(|_, window, cx| { + window.dispatch_action(Open.boxed_clone(), cx) + }), + ), ), ) } @@ -5754,11 +5756,12 @@ impl editor::Addon for GitPanelAddon { fn render_buffer_header_controls( &self, - excerpt_info: &ExcerptInfo, + _excerpt_info: &ExcerptBoundaryInfo, + buffer: &language::BufferSnapshot, window: &Window, cx: &App, ) -> Option { - let file = excerpt_info.buffer.file()?; + let file = buffer.file()?; let git_panel = self.workspace.upgrade()?.read(cx).panel::(cx)?; git_panel diff --git a/crates/git_ui/src/git_ui.rs b/crates/git_ui/src/git_ui.rs index e12e9142d081c5f083a1f9ba414d7099776f327d..7d73760e34d1b2923a247f71b04fc8b5218f380b 100644 --- a/crates/git_ui/src/git_ui.rs +++ b/crates/git_ui/src/git_ui.rs @@ -47,6 +47,8 @@ pub mod stash_picker; pub mod text_diff_view; pub mod worktree_picker; +pub use conflict_view::MergeConflictIndicator; + pub fn init(cx: &mut App) { editor::set_blame_renderer(blame_ui::GitBlameRenderer, cx); commit_view::init(cx); @@ -62,7 +64,6 @@ pub fn init(cx: &mut App) { git_panel::register(workspace); repository_selector::register(workspace); 
git_picker::register(workspace); - conflict_view::register_conflict_notification(workspace, cx); let project = workspace.project().read(cx); if project.is_read_only(cx) { diff --git a/crates/git_ui/src/project_diff.rs b/crates/git_ui/src/project_diff.rs index ae27b6e51fcb8f72b86f819a1aa4ac05c17c6e5f..8fa4680593a7565c84efd7503f6cf9d188d3be35 100644 --- a/crates/git_ui/src/project_diff.rs +++ b/crates/git_ui/src/project_diff.rs @@ -501,9 +501,11 @@ impl ProjectDiff { pub fn active_path(&self, cx: &App) -> Option { let editor = self.editor.read(cx).focused_editor().read(cx); + let multibuffer = editor.buffer().read(cx); let position = editor.selections.newest_anchor().head(); - let multi_buffer = editor.buffer().read(cx); - let (_, buffer, _) = multi_buffer.excerpt_containing(position, cx)?; + let snapshot = multibuffer.snapshot(cx); + let (text_anchor, _) = snapshot.anchor_to_buffer_anchor(position)?; + let buffer = multibuffer.buffer(text_anchor.buffer_id)?; let file = buffer.read(cx).file()?; Some(ProjectPath { @@ -516,9 +518,7 @@ impl ProjectDiff { self.editor.update(cx, |editor, cx| { editor.rhs_editor().update(cx, |editor, cx| { editor.change_selections(Default::default(), window, cx, |s| { - s.select_ranges(vec![ - multi_buffer::Anchor::min()..multi_buffer::Anchor::min(), - ]); + s.select_ranges(vec![multi_buffer::Anchor::Min..multi_buffer::Anchor::Min]); }); }); }); @@ -569,17 +569,17 @@ impl ProjectDiff { .collect::>(); if !ranges.iter().any(|range| range.start != range.end) { selection = false; - if let Some((excerpt_id, _, range)) = self - .editor - .read(cx) - .rhs_editor() - .read(cx) - .active_excerpt(cx) + let anchor = editor.selections.newest_anchor().head(); + if let Some((_, excerpt_range)) = snapshot.excerpt_containing(anchor..anchor) + && let Some(range) = snapshot + .anchor_in_buffer(excerpt_range.context.start) + .zip(snapshot.anchor_in_buffer(excerpt_range.context.end)) + .map(|(start, end)| start..end) { - ranges = 
vec![multi_buffer::Anchor::range_in_buffer(excerpt_id, range)]; + ranges = vec![range]; } else { ranges = Vec::default(); - } + }; } let mut has_staged_hunks = false; let mut has_unstaged_hunks = false; @@ -715,7 +715,7 @@ impl ProjectDiff { let (was_empty, is_excerpt_newly_added) = self.editor.update(cx, |editor, cx| { let was_empty = editor.rhs_editor().read(cx).buffer().read(cx).is_empty(); - let (_, is_newly_added) = editor.set_excerpts_for_path( + let is_newly_added = editor.update_excerpts_for_path( path_key.clone(), buffer, excerpt_ranges, @@ -735,7 +735,7 @@ impl ProjectDiff { cx, |selections| { selections.select_ranges([ - multi_buffer::Anchor::min()..multi_buffer::Anchor::min() + multi_buffer::Anchor::Min..multi_buffer::Anchor::Min ]) }, ); @@ -785,8 +785,9 @@ impl ProjectDiff { let mut previous_paths = this .multibuffer .read(cx) - .paths() - .cloned() + .snapshot(cx) + .buffers_with_paths() + .map(|(_, path_key)| path_key.clone()) .collect::>(); if let Some(repo) = repo { @@ -877,10 +878,23 @@ impl ProjectDiff { #[cfg(any(test, feature = "test-support"))] pub fn excerpt_paths(&self, cx: &App) -> Vec> { - self.multibuffer + let snapshot = self + .editor() + .read(cx) + .rhs_editor() + .read(cx) + .buffer() .read(cx) - .paths() - .map(|key| key.path.clone()) + .snapshot(cx); + snapshot + .excerpts() + .map(|excerpt| { + snapshot + .path_for_buffer(excerpt.context.start.buffer_id) + .unwrap() + .path + .clone() + }) .collect() } } @@ -1937,7 +1951,7 @@ mod tests { let snapshot = buffer_editor.snapshot(window, cx); let snapshot = &snapshot.buffer_snapshot(); let prev_buffer_hunks = buffer_editor - .diff_hunks_in_ranges(&[editor::Anchor::min()..editor::Anchor::max()], snapshot) + .diff_hunks_in_ranges(&[editor::Anchor::Min..editor::Anchor::Max], snapshot) .collect::>(); buffer_editor.git_restore(&Default::default(), window, cx); prev_buffer_hunks @@ -1950,7 +1964,7 @@ mod tests { let snapshot = buffer_editor.snapshot(window, cx); let snapshot = 
&snapshot.buffer_snapshot(); buffer_editor - .diff_hunks_in_ranges(&[editor::Anchor::min()..editor::Anchor::max()], snapshot) + .diff_hunks_in_ranges(&[editor::Anchor::Min..editor::Anchor::Max], snapshot) .collect::>() }); assert_eq!(new_buffer_hunks.as_slice(), &[]); @@ -2209,9 +2223,14 @@ mod tests { cx.update(|window, cx| { let editor = diff.read(cx).editor.read(cx).rhs_editor().clone(); - let excerpt_ids = editor.read(cx).buffer().read(cx).excerpt_ids(); - assert_eq!(excerpt_ids.len(), 1); - let excerpt_id = excerpt_ids[0]; + let excerpts = editor + .read(cx) + .buffer() + .read(cx) + .snapshot(cx) + .excerpts() + .collect::>(); + assert_eq!(excerpts.len(), 1); let buffer = editor .read(cx) .buffer() @@ -2239,7 +2258,6 @@ mod tests { resolve_conflict( editor.downgrade(), - excerpt_id, snapshot.conflicts[0].clone(), vec![ours_range], window, diff --git a/crates/git_ui/src/text_diff_view.rs b/crates/git_ui/src/text_diff_view.rs index 2dfef13f72681456174737af61380b87caae0ae1..fe2add8177e2c9ca92eb8d08776d561e1adaba91 100644 --- a/crates/git_ui/src/text_diff_view.rs +++ b/crates/git_ui/src/text_diff_view.rs @@ -11,7 +11,7 @@ use gpui::{ AnyElement, App, AppContext as _, AsyncApp, Context, Entity, EventEmitter, FocusHandle, Focusable, IntoElement, Render, Task, Window, }; -use language::{self, Buffer, Point}; +use language::{self, Buffer, OffsetRangeExt, Point}; use project::Project; use settings::Settings; use std::{ @@ -52,36 +52,26 @@ impl TextDiffView { let selection_data = source_editor.update(cx, |editor, cx| { let multibuffer = editor.buffer(); - let selections = editor.selections.all::(&editor.display_snapshot(cx)); - let first_selection = selections.first()?; - - let (source_buffer, buffer_start, start_excerpt) = multibuffer - .read(cx) - .point_to_buffer_point(first_selection.start, cx)?; - let buffer_end = multibuffer - .read(cx) - .point_to_buffer_point(first_selection.end, cx) - .and_then(|(buf, pt, end_excerpt)| { - (buf.read(cx).remote_id() == 
source_buffer.read(cx).remote_id() - && end_excerpt == start_excerpt) - .then_some(pt) - }) - .unwrap_or(buffer_start); + let multibuffer_snapshot = multibuffer.read(cx).snapshot(cx); + let first_selection = editor.selections.newest_anchor(); - let buffer_snapshot = source_buffer.read(cx); - let max_point = buffer_snapshot.max_point(); + let (source_buffer, buffer_range) = multibuffer_snapshot + .anchor_range_to_buffer_anchor_range(first_selection.range())?; + let max_point = source_buffer.max_point(); + let buffer_range = buffer_range.to_point(source_buffer); + let source_buffer = multibuffer.read(cx).buffer(source_buffer.remote_id())?; - if first_selection.is_empty() { + if buffer_range.is_empty() { let full_range = Point::new(0, 0)..max_point; return Some((source_buffer, full_range)); } - let expanded_start = Point::new(buffer_start.row, 0); - let expanded_end = if buffer_end.column > 0 { - let next_row = buffer_end.row + 1; + let expanded_start = Point::new(buffer_range.start.row, 0); + let expanded_end = if buffer_range.end.column > 0 { + let next_row = buffer_range.end.row + 1; cmp::min(max_point, Point::new(next_row, 0)) } else { - buffer_end + buffer_range.end }; Some((source_buffer, expanded_start..expanded_end)) }); diff --git a/crates/git_ui/src/worktree_picker.rs b/crates/git_ui/src/worktree_picker.rs index c3e2259e411c7a3a56a36b92735f8d5e014e53d7..1b4497be1f4ea96bd4f0431c97bb538eda9faa57 100644 --- a/crates/git_ui/src/worktree_picker.rs +++ b/crates/git_ui/src/worktree_picker.rs @@ -364,7 +364,7 @@ impl WorktreeListDelegate { workspace .update_in(cx, |workspace, window, cx| { workspace.open_workspace_for_paths( - OpenMode::Replace, + OpenMode::Activate, vec![new_worktree_path], window, cx, @@ -418,7 +418,7 @@ impl WorktreeListDelegate { return; }; let open_mode = if replace_current_window { - OpenMode::Replace + OpenMode::Activate } else { OpenMode::NewWindow }; @@ -754,7 +754,7 @@ impl PickerDelegate for WorktreeListDelegate { if entry.is_new { 
self.create_worktree(&entry.worktree.display_name(), secondary, None, window, cx); } else { - self.open_worktree(&entry.worktree.path, secondary, window, cx); + self.open_worktree(&entry.worktree.path, !secondary, window, cx); } cx.emit(DismissEvent); diff --git a/crates/go_to_line/src/cursor_position.rs b/crates/go_to_line/src/cursor_position.rs index 042d9a46b6c76a461e60d9002a2362190e253cd4..03bec51ac209fd6e3c254689b3b7caa2695fa450 100644 --- a/crates/go_to_line/src/cursor_position.rs +++ b/crates/go_to_line/src/cursor_position.rs @@ -42,23 +42,22 @@ impl UserCaretPosition { snapshot: &MultiBufferSnapshot, ) -> Self { let selection_end = selection.head(); - let (line, character) = if let Some((buffer_snapshot, point, _)) = - snapshot.point_to_buffer_point(selection_end) - { - let line_start = Point::new(point.row, 0); + let (line, character) = + if let Some((buffer_snapshot, point)) = snapshot.point_to_buffer_point(selection_end) { + let line_start = Point::new(point.row, 0); - let chars_to_last_position = buffer_snapshot - .text_summary_for_range::(line_start..point) - .chars as u32; - (line_start.row, chars_to_last_position) - } else { - let line_start = Point::new(selection_end.row, 0); + let chars_to_last_position = buffer_snapshot + .text_summary_for_range::(line_start..point) + .chars as u32; + (line_start.row, chars_to_last_position) + } else { + let line_start = Point::new(selection_end.row, 0); - let chars_to_last_position = snapshot - .text_summary_for_range::(line_start..selection_end) - .chars as u32; - (selection_end.row, chars_to_last_position) - }; + let chars_to_last_position = snapshot + .text_summary_for_range::(line_start..selection_end) + .chars as u32; + (selection_end.row, chars_to_last_position) + }; Self { line: NonZeroU32::new(line + 1).expect("added 1"), @@ -232,7 +231,7 @@ impl Render for CursorPosition { if let Some(editor) = workspace .active_item(cx) .and_then(|item| item.act_as::(cx)) - && let Some((_, buffer, _)) = 
editor.read(cx).active_excerpt(cx) + && let Some(buffer) = editor.read(cx).active_buffer(cx) { workspace.toggle_modal(window, cx, |window, cx| { crate::GoToLine::new(editor, buffer, window, cx) diff --git a/crates/go_to_line/src/go_to_line.rs b/crates/go_to_line/src/go_to_line.rs index a5332e96c731a29027ea6a69288d7d9556cb2da0..561d6a7d31398ab2a8eb74042fc1a617b7159d33 100644 --- a/crates/go_to_line/src/go_to_line.rs +++ b/crates/go_to_line/src/go_to_line.rs @@ -63,7 +63,7 @@ impl GoToLine { return; }; let editor = editor_handle.read(cx); - let Some((_, buffer, _)) = editor.active_excerpt(cx) else { + let Some(buffer) = editor.active_buffer(cx) else { return; }; workspace.update(cx, |workspace, cx| { @@ -93,11 +93,9 @@ impl GoToLine { let last_line = editor .buffer() .read(cx) - .excerpts_for_buffer(snapshot.remote_id(), cx) - .into_iter() - .map(move |(_, _, range)| { - text::ToPoint::to_point(&range.context.end, &snapshot).row - }) + .snapshot(cx) + .excerpts_for_buffer(snapshot.remote_id()) + .map(move |range| text::ToPoint::to_point(&range.context.end, &snapshot).row) .max() .unwrap_or(0); @@ -230,7 +228,7 @@ impl GoToLine { let character = query_char.unwrap_or(0).saturating_sub(1); let target_multi_buffer_row = MultiBufferRow(row); - let (buffer_snapshot, target_in_buffer, _) = snapshot.point_to_buffer_point(Point::new( + let (buffer_snapshot, target_in_buffer) = snapshot.point_to_buffer_point(Point::new( target_multi_buffer_row.min(snapshot.max_row()).0, 0, ))?; diff --git a/crates/gpui/src/elements/list.rs b/crates/gpui/src/elements/list.rs index ed441e3b40534690d02b31109e719c60dd5802e0..5a88d81c18db5e790b7bbed0fb9def23bc973e14 100644 --- a/crates/gpui/src/elements/list.rs +++ b/crates/gpui/src/elements/list.rs @@ -72,7 +72,7 @@ struct StateInner { scrollbar_drag_start_height: Option, measuring_behavior: ListMeasuringBehavior, pending_scroll: Option, - follow_tail: bool, + follow_state: FollowState, } /// Keeps track of a fractional scroll position within an 
item for restoration @@ -84,6 +84,49 @@ struct PendingScrollFraction { fraction: f32, } +/// Controls whether the list automatically follows new content at the end. +#[derive(Clone, Copy, Debug, Default, Eq, PartialEq)] +pub enum FollowMode { + /// Normal scrolling — no automatic following. + #[default] + Normal, + /// The list should auto-scroll along with the tail, when scrolled to bottom. + Tail, +} + +#[derive(Clone, Copy, Debug, Default, Eq, PartialEq)] +enum FollowState { + #[default] + Normal, + Tail { + is_following: bool, + }, +} + +impl FollowState { + fn is_following(&self) -> bool { + matches!(self, FollowState::Tail { is_following: true }) + } + + fn has_stopped_following(&self) -> bool { + matches!( + self, + FollowState::Tail { + is_following: false + } + ) + } + + fn start_following(&mut self) { + if let FollowState::Tail { + is_following: false, + } = self + { + *self = FollowState::Tail { is_following: true }; + } + } +} + /// Whether the list is scrolling from top to bottom or bottom to top. #[derive(Clone, Copy, Debug, Eq, PartialEq)] pub enum ListAlignment { @@ -169,6 +212,7 @@ pub struct ListPrepaintState { #[derive(Clone)] enum ListItem { Unmeasured { + size_hint: Option>, focus_handle: Option, }, Measured { @@ -186,9 +230,16 @@ impl ListItem { } } + fn size_hint(&self) -> Option> { + match self { + ListItem::Measured { size, .. } => Some(*size), + ListItem::Unmeasured { size_hint, .. } => *size_hint, + } + } + fn focus_handle(&self) -> Option { match self { - ListItem::Unmeasured { focus_handle } | ListItem::Measured { focus_handle, .. } => { + ListItem::Unmeasured { focus_handle, .. } | ListItem::Measured { focus_handle, .. } => { focus_handle.clone() } } @@ -196,7 +247,7 @@ impl ListItem { fn contains_focused(&self, window: &Window, cx: &App) -> bool { match self { - ListItem::Unmeasured { focus_handle } | ListItem::Measured { focus_handle, .. } => { + ListItem::Unmeasured { focus_handle, .. } | ListItem::Measured { focus_handle, .. 
} => { focus_handle .as_ref() .is_some_and(|handle| handle.contains_focused(window, cx)) @@ -240,7 +291,7 @@ impl ListState { scrollbar_drag_start_height: None, measuring_behavior: ListMeasuringBehavior::default(), pending_scroll: None, - follow_tail: false, + follow_state: FollowState::default(), }))); this.splice(0..0, item_count); this @@ -275,37 +326,63 @@ impl ListState { /// Use this when item heights may have changed (e.g., font size changes) /// but the number and identity of items remains the same. pub fn remeasure(&self) { - let state = &mut *self.0.borrow_mut(); + let count = self.item_count(); + self.remeasure_items(0..count); + } - let new_items = state.items.iter().map(|item| ListItem::Unmeasured { - focus_handle: item.focus_handle(), - }); + /// Mark items in `range` as needing remeasurement while preserving + /// the current scroll position. Unlike [`Self::splice`], this does + /// not change the number of items or blow away `logical_scroll_top`. + /// + /// Use this when an item's content has changed and its rendered + /// height may be different (e.g., streaming text, tool results + /// loading), but the item itself still exists at the same index. + pub fn remeasure_items(&self, range: Range) { + let state = &mut *self.0.borrow_mut(); - // If there's a `logical_scroll_top`, we need to keep track of it as a - // `PendingScrollFraction`, so we can later preserve that scroll - // position proportionally to the item, in case the item's height - // changes. + // If the scroll-top item falls within the remeasured range, + // store a fractional offset so the layout can restore the + // proportional scroll position after the item is re-rendered + // at its new height. 
if let Some(scroll_top) = state.logical_scroll_top { - let mut cursor = state.items.cursor::(()); - cursor.seek(&Count(scroll_top.item_ix), Bias::Right); + if range.contains(&scroll_top.item_ix) { + let mut cursor = state.items.cursor::(()); + cursor.seek(&Count(scroll_top.item_ix), Bias::Right); - if let Some(item) = cursor.item() { - if let Some(size) = item.size() { - let fraction = if size.height.0 > 0.0 { - (scroll_top.offset_in_item.0 / size.height.0).clamp(0.0, 1.0) - } else { - 0.0 - }; - - state.pending_scroll = Some(PendingScrollFraction { - item_ix: scroll_top.item_ix, - fraction, - }); + if let Some(item) = cursor.item() { + if let Some(size) = item.size() { + let fraction = if size.height.0 > 0.0 { + (scroll_top.offset_in_item.0 / size.height.0).clamp(0.0, 1.0) + } else { + 0.0 + }; + + state.pending_scroll = Some(PendingScrollFraction { + item_ix: scroll_top.item_ix, + fraction, + }); + } } } } - state.items = SumTree::from_iter(new_items, ()); + // Rebuild the tree, replacing items in the range with + // Unmeasured copies that keep their focus handles. + let new_items = { + let mut cursor = state.items.cursor::(()); + let mut new_items = cursor.slice(&Count(range.start), Bias::Right); + let invalidated = cursor.slice(&Count(range.end), Bias::Right); + new_items.extend( + invalidated.iter().map(|item| ListItem::Unmeasured { + size_hint: item.size_hint(), + focus_handle: item.focus_handle(), + }), + (), + ); + new_items.append(cursor.suffix(), ()); + new_items + }; + state.items = new_items; state.measuring_behavior.reset(); } @@ -339,7 +416,10 @@ impl ListState { new_items.extend( focus_handles.into_iter().map(|focus_handle| { spliced_count += 1; - ListItem::Unmeasured { focus_handle } + ListItem::Unmeasured { + size_hint: None, + focus_handle, + } }), (), ); @@ -382,6 +462,13 @@ impl ListState { let current_offset = self.logical_scroll_top(); let state = &mut *self.0.borrow_mut(); + + if distance < px(0.) 
{ + if let FollowState::Tail { is_following } = &mut state.follow_state { + *is_following = false; + } + } + let mut cursor = state.items.cursor::(()); cursor.seek(&Count(current_offset.item_ix), Bias::Right); @@ -414,17 +501,37 @@ impl ListState { }); } - /// Set whether the list should automatically follow the tail (auto-scroll to the end). - pub fn set_follow_tail(&self, follow: bool) { - self.0.borrow_mut().follow_tail = follow; - if follow { - self.scroll_to_end(); + /// Set the follow mode for the list. In `Tail` mode, the list + /// will auto-scroll to the end and re-engage after the user + /// scrolls back to the bottom. In `Normal` mode, no automatic + /// following occurs. + pub fn set_follow_mode(&self, mode: FollowMode) { + let state = &mut *self.0.borrow_mut(); + + match mode { + FollowMode::Normal => { + state.follow_state = FollowState::Normal; + } + FollowMode::Tail => { + state.follow_state = FollowState::Tail { is_following: true }; + if matches!(mode, FollowMode::Tail) { + let item_count = state.items.summary().count; + state.logical_scroll_top = Some(ListOffset { + item_ix: item_count, + offset_in_item: px(0.), + }); + } + } } } - /// Returns whether the list is currently in follow-tail mode (auto-scrolling to the end). + /// Returns whether the list is currently actively following the + /// tail (snapping to the end on each layout). 
pub fn is_following_tail(&self) -> bool { - self.0.borrow().follow_tail + matches!( + self.0.borrow().follow_state, + FollowState::Tail { is_following: true } + ) } /// Scroll the list to the given offset @@ -436,6 +543,12 @@ impl ListState { scroll_top.offset_in_item = px(0.); } + if scroll_top.item_ix < item_count { + if let FollowState::Tail { is_following } = &mut state.follow_state { + *is_following = false; + } + } + state.logical_scroll_top = Some(scroll_top); } @@ -592,6 +705,7 @@ impl StateInner { if self.reset { return; } + let padding = self.last_padding.unwrap_or_default(); let scroll_max = (self.items.summary().height + padding.top + padding.bottom - height).max(px(0.)); @@ -613,8 +727,10 @@ impl StateInner { }); } - if self.follow_tail && delta.y > px(0.) { - self.follow_tail = false; + if let FollowState::Tail { is_following } = &mut self.follow_state { + if delta.y > px(0.) { + *is_following = false; + } } if let Some(handler) = self.scroll_handler.as_mut() { @@ -624,7 +740,10 @@ impl StateInner { visible_range, count: self.items.summary().count, is_scrolled: self.logical_scroll_top.is_some(), - is_following_tail: self.follow_tail, + is_following_tail: matches!( + self.follow_state, + FollowState::Tail { is_following: true } + ), }, window, cx, @@ -715,7 +834,7 @@ impl StateInner { let mut max_item_width = px(0.); let mut scroll_top = self.logical_scroll_top(); - if self.follow_tail { + if self.follow_state.is_following() { scroll_top = ListOffset { item_ix: self.items.summary().count, offset_in_item: px(0.), @@ -868,6 +987,18 @@ impl StateInner { new_items.append(cursor.suffix(), ()); self.items = new_items; + // If follow_tail mode is on but the user scrolled away + // (is_following is false), check whether the current scroll + // position has returned to the bottom. 
+ if self.follow_state.has_stopped_following() { + let padding = self.last_padding.unwrap_or_default(); + let total_height = self.items.summary().height + padding.top + padding.bottom; + let scroll_offset = self.scroll_top(&scroll_top); + if scroll_offset + available_height >= total_height - px(1.0) { + self.follow_state.start_following(); + } + } + // If none of the visible items are focused, check if an off-screen item is focused // and include it to be rendered after the visible items so keyboard interaction continues // to work for it. @@ -1004,7 +1135,7 @@ impl StateInner { content_height - self.scrollbar_drag_start_height.unwrap_or(content_height); let new_scroll_top = (point.y - drag_offset).abs().max(px(0.)).min(scroll_max); - self.follow_tail = false; + self.follow_state = FollowState::Normal; if self.alignment == ListAlignment::Bottom && new_scroll_top == scroll_max { self.logical_scroll_top = None; @@ -1152,6 +1283,7 @@ impl Element for List { { let new_items = SumTree::from_iter( state.items.iter().map(|item| ListItem::Unmeasured { + size_hint: None, focus_handle: item.focus_handle(), }), (), @@ -1238,11 +1370,18 @@ impl sum_tree::Item for ListItem { fn summary(&self, _: ()) -> Self::Summary { match self { - ListItem::Unmeasured { focus_handle } => ListItemSummary { + ListItem::Unmeasured { + size_hint, + focus_handle, + } => ListItemSummary { count: 1, rendered_count: 0, unrendered_count: 1, - height: px(0.), + height: if let Some(size) = size_hint { + size.height + } else { + px(0.) 
+ }, has_focus_handles: focus_handle.is_some(), }, ListItem::Measured { @@ -1312,8 +1451,8 @@ mod test { use std::rc::Rc; use crate::{ - self as gpui, AppContext, Context, Element, IntoElement, ListState, Render, Styled, - TestAppContext, Window, div, list, point, px, size, + self as gpui, AppContext, Context, Element, FollowMode, IntoElement, ListState, Render, + Styled, TestAppContext, Window, div, list, point, px, size, }; #[gpui::test] @@ -1538,7 +1677,7 @@ mod test { }) }); - state.set_follow_tail(true); + state.set_follow_mode(FollowMode::Tail); // First paint — items are 50px, total 500px, viewport 200px. // Follow-tail should anchor to the end. @@ -1592,7 +1731,7 @@ mod test { } } - state.set_follow_tail(true); + state.set_follow_mode(FollowMode::Tail); // Paint with follow-tail — scroll anchored to the bottom. cx.draw(point(px(0.), px(0.)), size(px(100.), px(200.)), |_, cx| { @@ -1634,7 +1773,7 @@ mod test { let view = cx.update(|_, cx| cx.new(|_| TestView(state.clone()))); - state.set_follow_tail(true); + state.set_follow_mode(FollowMode::Tail); // Paint with follow-tail — scroll anchored to the bottom. cx.draw(point(px(0.), px(0.)), size(px(100.), px(200.)), |_, _| { @@ -1702,7 +1841,7 @@ mod test { // Enable follow-tail — this should immediately snap the scroll anchor // to the end, like the user just sent a prompt. - state.set_follow_tail(true); + state.set_follow_mode(FollowMode::Tail); cx.draw(point(px(0.), px(0.)), size(px(100.), px(200.)), |_, _| { view.into_any_element() @@ -1757,4 +1896,201 @@ mod test { -scroll_offset.y, max_offset.y, ); } + + /// When the user scrolls away from the bottom during follow_tail, + /// follow_tail suspends. If they scroll back to the bottom, the + /// next paint should re-engage follow_tail using fresh measurements. + #[gpui::test] + fn test_follow_tail_reengages_when_scrolled_back_to_bottom(cx: &mut TestAppContext) { + let cx = cx.add_empty_window(); + + // 10 items × 50px = 500px total, 200px viewport. 
+ let state = ListState::new(10, crate::ListAlignment::Top, px(0.)); + + struct TestView(ListState); + impl Render for TestView { + fn render(&mut self, _: &mut Window, _: &mut Context) -> impl IntoElement { + list(self.0.clone(), |_, _, _| { + div().h(px(50.)).w_full().into_any() + }) + .w_full() + .h_full() + } + } + + let view = cx.update(|_, cx| cx.new(|_| TestView(state.clone()))); + + state.set_follow_mode(FollowMode::Tail); + + cx.draw(point(px(0.), px(0.)), size(px(100.), px(200.)), |_, _| { + view.clone().into_any_element() + }); + assert!(state.is_following_tail()); + + // Scroll up — follow_tail should suspend (not fully disengage). + cx.simulate_event(ScrollWheelEvent { + position: point(px(50.), px(100.)), + delta: ScrollDelta::Pixels(point(px(0.), px(50.))), + ..Default::default() + }); + assert!(!state.is_following_tail()); + + // Scroll back down to the bottom. + cx.simulate_event(ScrollWheelEvent { + position: point(px(50.), px(100.)), + delta: ScrollDelta::Pixels(point(px(0.), px(-10000.))), + ..Default::default() + }); + + // After a paint, follow_tail should re-engage because the + // layout confirmed we're at the true bottom. + cx.draw(point(px(0.), px(0.)), size(px(100.), px(200.)), |_, _| { + view.clone().into_any_element() + }); + assert!( + state.is_following_tail(), + "follow_tail should re-engage after scrolling back to the bottom" + ); + } + + /// When an item is spliced to unmeasured (0px) while follow_tail + /// is suspended, the re-engagement check should still work correctly + #[gpui::test] + fn test_follow_tail_reengagement_not_fooled_by_unmeasured_items(cx: &mut TestAppContext) { + let cx = cx.add_empty_window(); + + // 20 items × 50px = 1000px total, 200px viewport, 1000px + // overdraw so all items get measured during the follow_tail + // paint (matching realistic production settings). 
+ let state = ListState::new(20, crate::ListAlignment::Top, px(1000.)); + + struct TestView(ListState); + impl Render for TestView { + fn render(&mut self, _: &mut Window, _: &mut Context) -> impl IntoElement { + list(self.0.clone(), |_, _, _| { + div().h(px(50.)).w_full().into_any() + }) + .w_full() + .h_full() + } + } + + let view = cx.update(|_, cx| cx.new(|_| TestView(state.clone()))); + + state.set_follow_mode(FollowMode::Tail); + + cx.draw(point(px(0.), px(0.)), size(px(100.), px(200.)), |_, _| { + view.clone().into_any_element() + }); + assert!(state.is_following_tail()); + + // Scroll up a meaningful amount — suspends follow_tail. + // 20 items × 50px = 1000px. viewport 200px. scroll_max = 800px. + // Scrolling up 200px puts us at 600px, clearly not at bottom. + cx.simulate_event(ScrollWheelEvent { + position: point(px(50.), px(100.)), + delta: ScrollDelta::Pixels(point(px(0.), px(200.))), + ..Default::default() + }); + assert!(!state.is_following_tail()); + + // Invalidate the last item (simulates EntryUpdated calling + // remeasure_items). This makes items.summary().height + // temporarily wrong (0px for the invalidated item). + state.remeasure_items(19..20); + + // Paint — layout re-measures the invalidated item with its true + // height. The re-engagement check uses these fresh measurements. + // Since we scrolled 200px up from the 800px max, we're at + // ~600px — NOT at the bottom, so follow_tail should NOT + // re-engage. + cx.draw(point(px(0.), px(0.)), size(px(100.), px(200.)), |_, _| { + view.clone().into_any_element() + }); + assert!( + !state.is_following_tail(), + "follow_tail should not falsely re-engage due to an unmeasured item \ + reducing items.summary().height" + ); + } + + /// Calling `set_follow_mode(FollowState::Normal)` or dragging the scrollbar should + /// fully disengage follow_tail — clearing any suspended state so + /// follow_tail won’t auto-re-engage. 
+ #[gpui::test] + fn test_follow_tail_suspended_state_cleared_by_explicit_actions(cx: &mut TestAppContext) { + let cx = cx.add_empty_window(); + + // 10 items × 50px = 500px total, 200px viewport. + let state = ListState::new(10, crate::ListAlignment::Top, px(0.)).measure_all(); + + struct TestView(ListState); + impl Render for TestView { + fn render(&mut self, _: &mut Window, _: &mut Context) -> impl IntoElement { + list(self.0.clone(), |_, _, _| { + div().h(px(50.)).w_full().into_any() + }) + .w_full() + .h_full() + } + } + + let view = cx.update(|_, cx| cx.new(|_| TestView(state.clone()))); + + state.set_follow_mode(FollowMode::Tail); + // --- Part 1: set_follow_mode(FollowState::Normal) clears suspended state --- + + cx.draw(point(px(0.), px(0.)), size(px(100.), px(200.)), |_, _| { + view.clone().into_any_element() + }); + + // Scroll up — suspends follow_tail. + cx.simulate_event(ScrollWheelEvent { + position: point(px(50.), px(100.)), + delta: ScrollDelta::Pixels(point(px(0.), px(50.))), + ..Default::default() + }); + assert!(!state.is_following_tail()); + + // Scroll back to the bottom — should re-engage follow_tail. + cx.simulate_event(ScrollWheelEvent { + position: point(px(50.), px(100.)), + delta: ScrollDelta::Pixels(point(px(0.), px(-10000.))), + ..Default::default() + }); + + cx.draw(point(px(0.), px(0.)), size(px(100.), px(200.)), |_, _| { + view.clone().into_any_element() + }); + assert!( + state.is_following_tail(), + "follow_tail should re-engage after scrolling back to the bottom" + ); + + // --- Part 2: scrollbar drag clears suspended state --- + + cx.draw(point(px(0.), px(0.)), size(px(100.), px(200.)), |_, _| { + view.clone().into_any_element() + }); + + // Drag the scrollbar to the middle — should clear suspended state. + state.set_offset_from_scrollbar(point(px(0.), px(150.))); + + // Scroll to the bottom. 
+ cx.simulate_event(ScrollWheelEvent { + position: point(px(50.), px(100.)), + delta: ScrollDelta::Pixels(point(px(0.), px(-10000.))), + ..Default::default() + }); + + // Paint — should NOT re-engage because the scrollbar drag + // cleared the suspended state. + cx.draw(point(px(0.), px(0.)), size(px(100.), px(200.)), |_, _| { + view.clone().into_any_element() + }); + assert!( + !state.is_following_tail(), + "follow_tail should not re-engage after scrollbar drag cleared the suspended state" + ); + } } diff --git a/crates/gpui/src/window.rs b/crates/gpui/src/window.rs index 7790480e32149fa33dfd082df7a8cdbb09568134..f9885f634d962b167bcf32cc459d5bf6e0d5661e 100644 --- a/crates/gpui/src/window.rs +++ b/crates/gpui/src/window.rs @@ -5496,6 +5496,8 @@ pub enum ElementId { CodeLocation(core::panic::Location<'static>), /// A labeled child of an element. NamedChild(Arc, SharedString), + /// A byte array ID (used for text-anchors) + OpaqueId([u8; 20]), } impl ElementId { @@ -5517,6 +5519,7 @@ impl Display for ElementId { ElementId::Path(path) => write!(f, "{}", path.display())?, ElementId::CodeLocation(location) => write!(f, "{}", location)?, ElementId::NamedChild(id, name) => write!(f, "{}-{}", id, name)?, + ElementId::OpaqueId(opaque_id) => write!(f, "{:x?}", opaque_id)?, } Ok(()) @@ -5631,6 +5634,12 @@ impl From<&'static core::panic::Location<'static>> for ElementId { } } +impl From<[u8; 20]> for ElementId { + fn from(opaque_id: [u8; 20]) -> Self { + ElementId::OpaqueId(opaque_id) + } +} + /// A rectangle to be rendered in the window at the given position and size. /// Passed as an argument [`Window::paint_quad`]. 
#[derive(Clone)] diff --git a/crates/gpui_macos/src/window.rs b/crates/gpui_macos/src/window.rs index ace36d695401ce76949129197dcd05135508f7d3..8811a4159a0f539d2bae2c62242a3d5f490686ef 100644 --- a/crates/gpui_macos/src/window.rs +++ b/crates/gpui_macos/src/window.rs @@ -1701,12 +1701,7 @@ impl rwh::HasWindowHandle for MacWindow { impl rwh::HasDisplayHandle for MacWindow { fn display_handle(&self) -> Result, rwh::HandleError> { - // SAFETY: This is a no-op on macOS - unsafe { - Ok(rwh::DisplayHandle::borrow_raw( - rwh::AppKitDisplayHandle::new().into(), - )) - } + Ok(rwh::DisplayHandle::appkit()) } } diff --git a/crates/gpui_wgpu/src/wgpu_atlas.rs b/crates/gpui_wgpu/src/wgpu_atlas.rs index 3eba5c533f80d727425cc87ae89b754afa8722b1..55f6edee21b9f2da02268c66c665c34d5b52066a 100644 --- a/crates/gpui_wgpu/src/wgpu_atlas.rs +++ b/crates/gpui_wgpu/src/wgpu_atlas.rs @@ -115,6 +115,8 @@ impl PlatformAtlas for WgpuAtlas { if let Some(mut texture) = texture_slot.take() { texture.decrement_ref_count(); if texture.is_unreferenced() { + lock.pending_uploads + .retain(|upload| upload.id != texture.id); lock.storage[id.kind] .free_list .push(texture.id.index as usize); @@ -228,7 +230,9 @@ impl WgpuAtlasState { fn flush_uploads(&mut self) { for upload in self.pending_uploads.drain(..) 
{ - let texture = &self.storage[upload.id]; + let Some(texture) = self.storage.get(upload.id) else { + continue; + }; let bytes_per_pixel = texture.bytes_per_pixel(); self.queue.write_texture( @@ -286,6 +290,15 @@ impl ops::IndexMut for WgpuAtlasStorage { } } +impl WgpuAtlasStorage { + fn get(&self, id: AtlasTextureId) -> Option<&WgpuAtlasTexture> { + self[id.kind] + .textures + .get(id.index as usize) + .and_then(|t| t.as_ref()) + } +} + impl ops::Index for WgpuAtlasStorage { type Output = WgpuAtlasTexture; fn index(&self, id: AtlasTextureId) -> &Self::Output { @@ -341,3 +354,70 @@ impl WgpuAtlasTexture { self.live_atlas_keys == 0 } } + +#[cfg(all(test, not(target_family = "wasm")))] +mod tests { + use super::*; + use gpui::{ImageId, RenderImageParams}; + use pollster::block_on; + use std::sync::Arc; + + fn test_device_and_queue() -> anyhow::Result<(Arc, Arc)> { + block_on(async { + let instance = wgpu::Instance::new(wgpu::InstanceDescriptor { + backends: wgpu::Backends::all(), + flags: wgpu::InstanceFlags::default(), + backend_options: wgpu::BackendOptions::default(), + memory_budget_thresholds: wgpu::MemoryBudgetThresholds::default(), + display: None, + }); + let adapter = instance + .request_adapter(&wgpu::RequestAdapterOptions { + power_preference: wgpu::PowerPreference::LowPower, + compatible_surface: None, + force_fallback_adapter: false, + }) + .await + .map_err(|error| anyhow::anyhow!("failed to request adapter: {error}"))?; + let (device, queue) = adapter + .request_device(&wgpu::DeviceDescriptor { + label: Some("wgpu_atlas_test_device"), + required_features: wgpu::Features::empty(), + required_limits: wgpu::Limits::downlevel_defaults() + .using_resolution(adapter.limits()) + .using_alignment(adapter.limits()), + memory_hints: wgpu::MemoryHints::MemoryUsage, + trace: wgpu::Trace::Off, + experimental_features: wgpu::ExperimentalFeatures::disabled(), + }) + .await + .map_err(|error| anyhow::anyhow!("failed to request device: {error}"))?; + 
Ok((Arc::new(device), Arc::new(queue))) + }) + } + + #[test] + fn before_frame_skips_uploads_for_removed_texture() -> anyhow::Result<()> { + let (device, queue) = test_device_and_queue()?; + + let atlas = WgpuAtlas::new(device, queue); + let key = AtlasKey::Image(RenderImageParams { + image_id: ImageId(1), + frame_index: 0, + }); + let size = Size { + width: DevicePixels(1), + height: DevicePixels(1), + }; + let mut build = || Ok(Some((size, Cow::Owned(vec![0, 0, 0, 255])))); + + // Regression test: before the fix, this panicked in flush_uploads + atlas + .get_or_insert_with(&key, &mut build)? + .expect("tile should be created"); + atlas.remove(&key); + atlas.before_frame(); + + Ok(()) + } +} diff --git a/crates/gpui_windows/src/window.rs b/crates/gpui_windows/src/window.rs index 92255f93fd95969931c6b1ae8cb465ff628f82cb..f655c1989e2c69743032703532f91b3b517084b6 100644 --- a/crates/gpui_windows/src/window.rs +++ b/crates/gpui_windows/src/window.rs @@ -540,10 +540,9 @@ impl rwh::HasWindowHandle for WindowsWindow { } } -// todo(windows) impl rwh::HasDisplayHandle for WindowsWindow { fn display_handle(&self) -> std::result::Result, rwh::HandleError> { - unimplemented!() + Ok(rwh::DisplayHandle::windows()) } } diff --git a/crates/grammars/src/c/highlights.scm b/crates/grammars/src/c/highlights.scm index dc5a3bd99937eb3cd1a3af6efb7124aebc4008f1..b73c8e80b8acb61cc0cf47ed6585202eb73f4a7b 100644 --- a/crates/grammars/src/c/highlights.scm +++ b/crates/grammars/src/c/highlights.scm @@ -38,7 +38,7 @@ "#ifndef" "#include" (preproc_directive) -] @preproc +] @keyword.preproc @preproc [ "=" diff --git a/crates/grammars/src/cpp/highlights.scm b/crates/grammars/src/cpp/highlights.scm index e074707d05dec638a1be9ea840c31f47537c438a..281da4215c8269172816c6f37a5e6e866c04a140 100644 --- a/crates/grammars/src/cpp/highlights.scm +++ b/crates/grammars/src/cpp/highlights.scm @@ -196,7 +196,7 @@ type: (primitive_type) @type.builtin "#ifndef" "#include" (preproc_directive) -] @preproc +] 
@keyword.preproc @preproc (comment) @comment diff --git a/crates/grammars/src/diff/highlights.scm b/crates/grammars/src/diff/highlights.scm index a2e33190f154d6a210572dbb066000dca6f30455..3c9abbe147b6554d6894d5d8d3c8bcf5d93e2edd 100644 --- a/crates/grammars/src/diff/highlights.scm +++ b/crates/grammars/src/diff/highlights.scm @@ -3,15 +3,13 @@ [ (addition) (new_file) -] @string +] @string @diff.plus -; TODO: This should eventually be `@diff.plus` with a fallback of `@string` [ (deletion) (old_file) -] @keyword +] @keyword @diff.minus -; TODO: This should eventually be `@diff.minus` with a fallback of `@keyword` (commit) @constant (location) @attribute diff --git a/crates/grammars/src/javascript/highlights.scm b/crates/grammars/src/javascript/highlights.scm index 4af87cc578e3060e72d1e1374f4904d8c7629ddf..f6354dd3a016f544e5be1616c3dfb12144855775 100644 --- a/crates/grammars/src/javascript/highlights.scm +++ b/crates/grammars/src/javascript/highlights.scm @@ -328,26 +328,26 @@ ; JSX elements (jsx_opening_element [ - (identifier) @type + (identifier) @type @tag.component.jsx (member_expression - object: (identifier) @type - property: (property_identifier) @type) + object: (identifier) @type @tag.component.jsx + property: (property_identifier) @type @tag.component.jsx) ]) (jsx_closing_element [ - (identifier) @type + (identifier) @type @tag.component.jsx (member_expression - object: (identifier) @type - property: (property_identifier) @type) + object: (identifier) @type @tag.component.jsx + property: (property_identifier) @type @tag.component.jsx) ]) (jsx_self_closing_element [ - (identifier) @type + (identifier) @type @tag.component.jsx (member_expression - object: (identifier) @type - property: (property_identifier) @type) + object: (identifier) @type @tag.component.jsx + property: (property_identifier) @type @tag.component.jsx) ]) (jsx_opening_element diff --git a/crates/grammars/src/tsx/highlights.scm b/crates/grammars/src/tsx/highlights.scm index 
482bba7f081a44b78a2f2d72c3435d8a6419b874..0f203e7112cf14268d0edfed39b5624375d1a859 100644 --- a/crates/grammars/src/tsx/highlights.scm +++ b/crates/grammars/src/tsx/highlights.scm @@ -389,26 +389,26 @@ (jsx_opening_element [ - (identifier) @type + (identifier) @type @tag.component.jsx (member_expression - object: (identifier) @type - property: (property_identifier) @type) + object: (identifier) @type @tag.component.jsx + property: (property_identifier) @type @tag.component.jsx) ]) (jsx_closing_element [ - (identifier) @type + (identifier) @type @tag.component.jsx (member_expression - object: (identifier) @type - property: (property_identifier) @type) + object: (identifier) @type @tag.component.jsx + property: (property_identifier) @type @tag.component.jsx) ]) (jsx_self_closing_element [ - (identifier) @type + (identifier) @type @tag.component.jsx (member_expression - object: (identifier) @type - property: (property_identifier) @type) + object: (identifier) @type @tag.component.jsx + property: (property_identifier) @type @tag.component.jsx) ]) (jsx_opening_element diff --git a/crates/icons/src/icons.rs b/crates/icons/src/icons.rs index 6929ae4e4ca8ca0ee00c9793c948892043dd6dd6..e29b7d3593025556771d62dc0124786672c540de 100644 --- a/crates/icons/src/icons.rs +++ b/crates/icons/src/icons.rs @@ -95,6 +95,7 @@ pub enum IconName { DebugStepOver, Diff, DiffSplit, + DiffSplitAuto, DiffUnified, Disconnected, Download, diff --git a/crates/inspector_ui/src/div_inspector.rs b/crates/inspector_ui/src/div_inspector.rs index a7616e134a16bbe2b96a6d23d20453b9a5ee4e5f..7ec2d7ba8303e899331d3f38642a9a51f4c14d4c 100644 --- a/crates/inspector_ui/src/div_inspector.rs +++ b/crates/inspector_ui/src/div_inspector.rs @@ -1,7 +1,6 @@ use anyhow::{Result, anyhow}; use editor::{ - Bias, CompletionProvider, Editor, EditorEvent, EditorMode, ExcerptId, MinimapVisibility, - MultiBuffer, + Bias, CompletionProvider, Editor, EditorEvent, EditorMode, MinimapVisibility, MultiBuffer, }; use 
fuzzy::StringMatch; use gpui::{ @@ -641,7 +640,6 @@ struct RustStyleCompletionProvider { impl CompletionProvider for RustStyleCompletionProvider { fn completions( &self, - _excerpt_id: ExcerptId, buffer: &Entity, position: Anchor, _: editor::CompletionContext, diff --git a/crates/keymap_editor/src/action_completion_provider.rs b/crates/keymap_editor/src/action_completion_provider.rs index 98428baeb2f7b419ba7354130e12f1a4710c8aea..10d977572b9c52cba1ad9d87c7035bd1552d5e33 100644 --- a/crates/keymap_editor/src/action_completion_provider.rs +++ b/crates/keymap_editor/src/action_completion_provider.rs @@ -26,7 +26,6 @@ impl ActionCompletionProvider { impl CompletionProvider for ActionCompletionProvider { fn completions( &self, - _excerpt_id: editor::ExcerptId, buffer: &Entity, buffer_position: language::Anchor, _trigger: editor::CompletionContext, diff --git a/crates/keymap_editor/src/keymap_editor.rs b/crates/keymap_editor/src/keymap_editor.rs index 2e3172dac95fe91ed5b2a5a187ca57bbd9154fae..ee9f6a11c2b51f7993b17c01352cfb97b535049a 100644 --- a/crates/keymap_editor/src/keymap_editor.rs +++ b/crates/keymap_editor/src/keymap_editor.rs @@ -3480,7 +3480,6 @@ struct KeyContextCompletionProvider { impl CompletionProvider for KeyContextCompletionProvider { fn completions( &self, - _excerpt_id: editor::ExcerptId, buffer: &Entity, buffer_position: language::Anchor, _trigger: editor::CompletionContext, diff --git a/crates/language/src/buffer.rs b/crates/language/src/buffer.rs index b2ab420312249f809599d06315e706627b76570b..a467cd789555d39a32ad4e1d7b21da7b14df9c25 100644 --- a/crates/language/src/buffer.rs +++ b/crates/language/src/buffer.rs @@ -5549,11 +5549,11 @@ impl<'a> BufferChunks<'a> { && range.start >= capture.node.start_byte() { let next_capture_end = capture.node.end_byte(); - if range.start < next_capture_end { - highlights.stack.push(( - next_capture_end, - highlights.highlight_maps[capture.grammar_index].get(capture.index), - )); + if range.start < next_capture_end + 
&& let Some(capture_id) = + highlights.highlight_maps[capture.grammar_index].get(capture.index) + { + highlights.stack.push((next_capture_end, capture_id)); } highlights.next_capture.take(); } @@ -5688,9 +5688,11 @@ impl<'a> Iterator for BufferChunks<'a> { } else { let highlight_id = highlights.highlight_maps[capture.grammar_index].get(capture.index); - highlights - .stack - .push((capture.node.end_byte(), highlight_id)); + if let Some(highlight_id) = highlight_id { + highlights + .stack + .push((capture.node.end_byte(), highlight_id)); + } highlights.next_capture = highlights.captures.next(); } } diff --git a/crates/language/src/diagnostic_set.rs b/crates/language/src/diagnostic_set.rs index fa3263df48ff773b32332980e7341fa8a453ba4f..04564ecd6575f9470315e0571a60126c69d81d2b 100644 --- a/crates/language/src/diagnostic_set.rs +++ b/crates/language/src/diagnostic_set.rs @@ -326,23 +326,17 @@ impl DiagnosticEntry { } } -impl Default for Summary { - fn default() -> Self { - Self { - start: Anchor::MIN, - end: Anchor::MAX, - min_start: Anchor::MAX, - max_end: Anchor::MIN, - count: 0, - } - } -} - impl sum_tree::Summary for Summary { type Context<'a> = &'a text::BufferSnapshot; - fn zero(_cx: Self::Context<'_>) -> Self { - Default::default() + fn zero(buffer: &text::BufferSnapshot) -> Self { + Self { + start: Anchor::min_for_buffer(buffer.remote_id()), + end: Anchor::max_for_buffer(buffer.remote_id()), + min_start: Anchor::max_for_buffer(buffer.remote_id()), + max_end: Anchor::min_for_buffer(buffer.remote_id()), + count: 0, + } } fn add_summary(&mut self, other: &Self, buffer: Self::Context<'_>) { diff --git a/crates/language/src/language.rs b/crates/language/src/language.rs index 035cb3a2009241cc4ff97a7adf4c82de73166a76..43bbe7a08c73e476a41aec8af015464aa3af853d 100644 --- a/crates/language/src/language.rs +++ b/crates/language/src/language.rs @@ -1023,9 +1023,7 @@ impl Language { BufferChunks::new(text, range, Some((captures, highlight_maps)), false, None) { let 
end_offset = offset + chunk.text.len(); - if let Some(highlight_id) = chunk.syntax_highlight_id - && !highlight_id.is_default() - { + if let Some(highlight_id) = chunk.syntax_highlight_id { result.push((offset..end_offset, highlight_id)); } offset = end_offset; @@ -1077,11 +1075,11 @@ impl Language { #[inline] pub fn build_highlight_map(capture_names: &[&str], theme: &SyntaxTheme) -> HighlightMap { - HighlightMap::from_ids(capture_names.iter().map(|capture_name| { - theme - .highlight_id(capture_name) - .map_or(HighlightId::default(), HighlightId) - })) + HighlightMap::from_ids( + capture_names + .iter() + .map(|capture_name| theme.highlight_id(capture_name).map(HighlightId::new)), + ) } impl LanguageScope { @@ -1645,9 +1643,18 @@ mod tests { ]; let map = build_highlight_map(capture_names, &theme); - assert_eq!(theme.get_capture_name(map.get(0)), Some("function")); - assert_eq!(theme.get_capture_name(map.get(1)), Some("function.async")); - assert_eq!(theme.get_capture_name(map.get(2)), Some("variable.builtin")); + assert_eq!( + theme.get_capture_name(map.get(0).unwrap()), + Some("function") + ); + assert_eq!( + theme.get_capture_name(map.get(1).unwrap()), + Some("function.async") + ); + assert_eq!( + theme.get_capture_name(map.get(2).unwrap()), + Some("variable.builtin") + ); } #[gpui::test(iterations = 10)] diff --git a/crates/language/src/proto.rs b/crates/language/src/proto.rs index 89c44513067f6d2309d68a9f38984988358d8877..5e3179e929da012cce8e7da6b436e89c0c4519de 100644 --- a/crates/language/src/proto.rs +++ b/crates/language/src/proto.rs @@ -174,11 +174,11 @@ pub fn serialize_selection(selection: &Selection) -> proto::Selection { id: selection.id as u64, start: Some(proto::EditorAnchor { anchor: Some(serialize_anchor(&selection.start)), - excerpt_id: 0, + excerpt_id: None, }), end: Some(proto::EditorAnchor { anchor: Some(serialize_anchor(&selection.end)), - excerpt_id: 0, + excerpt_id: None, }), reversed: selection.reversed, } @@ -260,7 +260,7 @@ pub fn 
serialize_anchor(anchor: &Anchor) -> proto::Anchor { Bias::Left => proto::Bias::Left as i32, Bias::Right => proto::Bias::Right as i32, }, - buffer_id: anchor.buffer_id.map(Into::into), + buffer_id: Some(anchor.buffer_id.into()), } } @@ -498,7 +498,7 @@ pub fn deserialize_anchor(anchor: proto::Anchor) -> Option { timestamp, anchor.offset as u32, bias, - buffer_id, + buffer_id?, )) } diff --git a/crates/language/src/syntax_map.rs b/crates/language/src/syntax_map.rs index f2f79b9a793f303fef66fb4266d67f1fbd2ed52d..b73276ffd92be8915e2272b5242770fc52854af1 100644 --- a/crates/language/src/syntax_map.rs +++ b/crates/language/src/syntax_map.rs @@ -18,7 +18,7 @@ use std::{ }; use streaming_iterator::StreamingIterator; use sum_tree::{Bias, Dimensions, SeekTarget, SumTree}; -use text::{Anchor, BufferSnapshot, OffsetRangeExt, Point, Rope, ToOffset, ToPoint}; +use text::{Anchor, BufferId, BufferSnapshot, OffsetRangeExt, Point, Rope, ToOffset, ToPoint}; use tree_sitter::{ Node, Query, QueryCapture, QueryCaptures, QueryCursor, QueryMatch, QueryMatches, QueryPredicateArg, @@ -56,7 +56,15 @@ impl Drop for SyntaxSnapshot { // This does allocate a new Arc, but it's cheap and avoids blocking the main thread without needing to use an `Option` or `MaybeUninit`. 
let _ = DROP_TX.send(std::mem::replace( &mut self.layers, - SumTree::from_summary(Default::default()), + SumTree::from_summary(SyntaxLayerSummary { + min_depth: Default::default(), + max_depth: Default::default(), + // Deliberately bogus anchors, doesn't matter in this context + range: Anchor::min_min_range_for_buffer(BufferId::new(1).unwrap()), + last_layer_range: Anchor::min_min_range_for_buffer(BufferId::new(1).unwrap()), + last_layer_language: Default::default(), + contains_unknown_injections: Default::default(), + }), )); } } @@ -588,7 +596,7 @@ impl SyntaxSnapshot { let bounded_position = SyntaxLayerPositionBeforeChange { position: position.clone(), - change: changed_regions.start_position(), + change: changed_regions.start_position(text.remote_id()), }; if bounded_position.cmp(cursor.start(), text).is_gt() { let slice = cursor.slice(&bounded_position, Bias::Left); @@ -1946,11 +1954,11 @@ impl ChangedRegion { } impl ChangeRegionSet { - fn start_position(&self) -> ChangeStartPosition { + fn start_position(&self, buffer_id: BufferId) -> ChangeStartPosition { self.0.first().map_or( ChangeStartPosition { depth: usize::MAX, - position: Anchor::MAX, + position: Anchor::max_for_buffer(buffer_id), }, |region| ChangeStartPosition { depth: region.depth, @@ -1999,32 +2007,28 @@ impl ChangeRegionSet { } } -impl Default for SyntaxLayerSummary { - fn default() -> Self { +impl sum_tree::Summary for SyntaxLayerSummary { + type Context<'a> = &'a BufferSnapshot; + + fn zero(buffer: &BufferSnapshot) -> Self { Self { max_depth: 0, min_depth: 0, - range: Anchor::MAX..Anchor::MIN, - last_layer_range: Anchor::MIN..Anchor::MAX, + range: Anchor::max_for_buffer(buffer.remote_id()) + ..Anchor::min_for_buffer(buffer.remote_id()), + last_layer_range: Anchor::min_for_buffer(buffer.remote_id()) + ..Anchor::max_for_buffer(buffer.remote_id()), last_layer_language: None, contains_unknown_injections: false, } } -} - -impl sum_tree::Summary for SyntaxLayerSummary { - type Context<'a> = &'a 
BufferSnapshot; - - fn zero(_cx: &BufferSnapshot) -> Self { - Default::default() - } fn add_summary(&mut self, other: &Self, buffer: Self::Context<'_>) { if other.max_depth > self.max_depth { self.max_depth = other.max_depth; self.range = other.range.clone(); } else { - if self.range == (Anchor::MAX..Anchor::MAX) { + if self.range.start.is_max() && self.range.end.is_max() { self.range.start = other.range.start; } if other.range.end.cmp(&self.range.end, buffer).is_gt() { diff --git a/crates/language_core/src/grammar.rs b/crates/language_core/src/grammar.rs index 77e3805e52415a20f5d343bff98682744a50fdc2..54e9a3f1b3309718436b206874802779925a9d04 100644 --- a/crates/language_core/src/grammar.rs +++ b/crates/language_core/src/grammar.rs @@ -275,12 +275,11 @@ impl Grammar { } pub fn highlight_id_for_name(&self, name: &str) -> Option { - let capture_id = self - .highlights_config + self.highlights_config .as_ref()? .query - .capture_index_for_name(name)?; - Some(self.highlight_map.lock().get(capture_id)) + .capture_index_for_name(name) + .and_then(|capture_id| self.highlight_map.lock().get(capture_id)) } pub fn debug_variables_config(&self) -> Option<&DebugVariablesConfig> { diff --git a/crates/language_core/src/highlight_map.rs b/crates/language_core/src/highlight_map.rs index 1235c7d62c72950f57de0cdad1363f49d8fbbd96..cba5cda6f7501a04966d5ce512e2fed700724d1a 100644 --- a/crates/language_core/src/highlight_map.rs +++ b/crates/language_core/src/highlight_map.rs @@ -1,35 +1,35 @@ -use std::sync::Arc; +use std::{num::NonZeroU32, sync::Arc}; #[derive(Clone, Debug)] -pub struct HighlightMap(Arc<[HighlightId]>); +pub struct HighlightMap(Arc<[Option]>); #[derive(Clone, Copy, Debug, PartialEq, Eq)] -pub struct HighlightId(pub u32); +pub struct HighlightId(NonZeroU32); -const DEFAULT_SYNTAX_HIGHLIGHT_ID: HighlightId = HighlightId(u32::MAX); +impl HighlightId { + pub const TABSTOP_INSERT_ID: HighlightId = HighlightId(NonZeroU32::new(u32::MAX - 1).unwrap()); + pub const 
TABSTOP_REPLACE_ID: HighlightId = HighlightId(NonZeroU32::new(u32::MAX - 2).unwrap()); -impl HighlightMap { - #[inline] - pub fn from_ids(highlight_ids: impl IntoIterator) -> Self { - Self(highlight_ids.into_iter().collect()) + pub fn new(capture_id: u32) -> Self { + Self(NonZeroU32::new(capture_id + 1).unwrap_or(NonZeroU32::MAX)) } +} - #[inline] - pub fn get(&self, capture_id: u32) -> HighlightId { - self.0 - .get(capture_id as usize) - .copied() - .unwrap_or(DEFAULT_SYNTAX_HIGHLIGHT_ID) +impl From for usize { + fn from(value: HighlightId) -> Self { + value.0.get() as usize - 1 } } -impl HighlightId { - pub const TABSTOP_INSERT_ID: HighlightId = HighlightId(u32::MAX - 1); - pub const TABSTOP_REPLACE_ID: HighlightId = HighlightId(u32::MAX - 2); +impl HighlightMap { + #[inline] + pub fn from_ids(highlight_ids: impl IntoIterator>) -> Self { + Self(highlight_ids.into_iter().collect()) + } #[inline] - pub fn is_default(&self) -> bool { - *self == DEFAULT_SYNTAX_HIGHLIGHT_ID + pub fn get(&self, capture_id: u32) -> Option { + self.0.get(capture_id as usize).copied().flatten() } } @@ -38,15 +38,3 @@ impl Default for HighlightMap { Self(Arc::new([])) } } - -impl Default for HighlightId { - fn default() -> Self { - DEFAULT_SYNTAX_HIGHLIGHT_ID - } -} - -impl From for usize { - fn from(value: HighlightId) -> Self { - value.0 as usize - } -} diff --git a/crates/language_extension/src/extension_lsp_adapter.rs b/crates/language_extension/src/extension_lsp_adapter.rs index 13899f11c30556db189da48ed1fcb4b5d12b2f20..3c28e07e6b306ea3a0ce644ac688f9fab8d6125f 100644 --- a/crates/language_extension/src/extension_lsp_adapter.rs +++ b/crates/language_extension/src/extension_lsp_adapter.rs @@ -684,7 +684,7 @@ fn test_build_code_label() { ); let code_runs = code_ranges .into_iter() - .map(|range| (range, HighlightId(0))) + .map(|range| (range, HighlightId::new(0))) .collect::>(); let label = build_code_label( @@ -707,7 +707,7 @@ fn test_build_code_label() { marked_text_ranges("pqrs.tuv: 
«fn»(«Bcd»(«Efgh»)) -> «Ijklm»", false); let label_runs = label_ranges .into_iter() - .map(|range| (range, HighlightId(0))) + .map(|range| (range, HighlightId::new(0))) .collect::>(); assert_eq!( @@ -723,7 +723,7 @@ fn test_build_code_label_with_invalid_ranges() { let (code, code_ranges) = marked_text_ranges("const «a»: «B» = '🏀'", false); let code_runs = code_ranges .into_iter() - .map(|range| (range, HighlightId(0))) + .map(|range| (range, HighlightId::new(0))) .collect::>(); // A span uses a code range that is invalid because it starts inside of diff --git a/crates/language_model/Cargo.toml b/crates/language_model/Cargo.toml index 911100fc25b498ba5471c85d6177052495974665..4712d86dff6c44f9cdd8576a08349ccfa7d0ecca 100644 --- a/crates/language_model/Cargo.toml +++ b/crates/language_model/Cargo.toml @@ -20,11 +20,11 @@ anthropic = { workspace = true, features = ["schemars"] } anyhow.workspace = true credentials_provider.workspace = true base64.workspace = true -client.workspace = true cloud_api_client.workspace = true cloud_api_types.workspace = true cloud_llm_client.workspace = true collections.workspace = true +env_var.workspace = true futures.workspace = true gpui.workspace = true http_client.workspace = true @@ -40,7 +40,6 @@ serde_json.workspace = true smol.workspace = true thiserror.workspace = true util.workspace = true -zed_env_vars.workspace = true [dev-dependencies] gpui = { workspace = true, features = ["test-support"] } diff --git a/crates/language_model/src/api_key.rs b/crates/language_model/src/api_key.rs index 754fde069295d8799820020bef286b1a1a3c590c..4be5a64d3db6231c98b830a524d5e299faace457 100644 --- a/crates/language_model/src/api_key.rs +++ b/crates/language_model/src/api_key.rs @@ -1,5 +1,6 @@ use anyhow::{Result, anyhow}; use credentials_provider::CredentialsProvider; +use env_var::EnvVar; use futures::{FutureExt, future}; use gpui::{AsyncApp, Context, SharedString, Task}; use std::{ @@ -7,7 +8,6 @@ use std::{ sync::Arc, }; use util::ResultExt 
as _; -use zed_env_vars::EnvVar; use crate::AuthenticateError; @@ -101,6 +101,7 @@ impl ApiKeyState { url: SharedString, key: Option, get_this: impl Fn(&mut Ent) -> &mut Self + 'static, + provider: Arc, cx: &Context, ) -> Task> { if self.is_from_env_var() { @@ -108,18 +109,14 @@ impl ApiKeyState { "bug: attempted to store API key in system keychain when API key is from env var", ))); } - let credentials_provider = ::global(cx); cx.spawn(async move |ent, cx| { if let Some(key) = &key { - credentials_provider + provider .write_credentials(&url, "Bearer", key.as_bytes(), cx) .await .log_err(); } else { - credentials_provider - .delete_credentials(&url, cx) - .await - .log_err(); + provider.delete_credentials(&url, cx).await.log_err(); } ent.update(cx, |ent, cx| { let this = get_this(ent); @@ -144,12 +141,13 @@ impl ApiKeyState { &mut self, url: SharedString, get_this: impl Fn(&mut Ent) -> &mut Self + Clone + 'static, + provider: Arc, cx: &mut Context, ) { if url != self.url { if !self.is_from_env_var() { // loading will continue even though this result task is dropped - let _task = self.load_if_needed(url, get_this, cx); + let _task = self.load_if_needed(url, get_this, provider, cx); } } } @@ -163,6 +161,7 @@ impl ApiKeyState { &mut self, url: SharedString, get_this: impl Fn(&mut Ent) -> &mut Self + Clone + 'static, + provider: Arc, cx: &mut Context, ) -> Task> { if let LoadStatus::Loaded { .. 
} = &self.load_status @@ -185,7 +184,7 @@ impl ApiKeyState { let task = if let Some(load_task) = &self.load_task { load_task.clone() } else { - let load_task = Self::load(url.clone(), get_this.clone(), cx).shared(); + let load_task = Self::load(url.clone(), get_this.clone(), provider, cx).shared(); self.url = url; self.load_status = LoadStatus::NotPresent; self.load_task = Some(load_task.clone()); @@ -206,14 +205,13 @@ impl ApiKeyState { fn load( url: SharedString, get_this: impl Fn(&mut Ent) -> &mut Self + 'static, + provider: Arc, cx: &Context, ) -> Task<()> { - let credentials_provider = ::global(cx); cx.spawn({ async move |ent, cx| { let load_status = - ApiKey::load_from_system_keychain_impl(&url, credentials_provider.as_ref(), cx) - .await; + ApiKey::load_from_system_keychain_impl(&url, provider.as_ref(), cx).await; ent.update(cx, |ent, cx| { let this = get_this(ent); this.url = url; diff --git a/crates/language_model/src/fake_provider.rs b/crates/language_model/src/fake_provider.rs index ae01084a2657abdc86e7510aa49663cf98aabe70..50037f31facbac446de7ecf38536d1e4a24c7867 100644 --- a/crates/language_model/src/fake_provider.rs +++ b/crates/language_model/src/fake_provider.rs @@ -125,6 +125,7 @@ pub struct FakeLanguageModel { >, forbid_requests: AtomicBool, supports_thinking: AtomicBool, + supports_streaming_tools: AtomicBool, } impl Default for FakeLanguageModel { @@ -137,6 +138,7 @@ impl Default for FakeLanguageModel { current_completion_txs: Mutex::new(Vec::new()), forbid_requests: AtomicBool::new(false), supports_thinking: AtomicBool::new(false), + supports_streaming_tools: AtomicBool::new(false), } } } @@ -169,6 +171,10 @@ impl FakeLanguageModel { self.supports_thinking.store(supports, SeqCst); } + pub fn set_supports_streaming_tools(&self, supports: bool) { + self.supports_streaming_tools.store(supports, SeqCst); + } + pub fn pending_completions(&self) -> Vec { self.current_completion_txs .lock() @@ -282,6 +288,10 @@ impl LanguageModel for FakeLanguageModel 
{ self.supports_thinking.load(SeqCst) } + fn supports_streaming_tools(&self) -> bool { + self.supports_streaming_tools.load(SeqCst) + } + fn telemetry_id(&self) -> String { "fake".to_string() } diff --git a/crates/language_model/src/language_model.rs b/crates/language_model/src/language_model.rs index ce71cee6bcaf4f7ea1e210cc3756bd3162715f55..3f309b7b1d4152c54324efaaf0ad3bdb7035eea4 100644 --- a/crates/language_model/src/language_model.rs +++ b/crates/language_model/src/language_model.rs @@ -11,12 +11,10 @@ pub mod tool_schema; pub mod fake_provider; use anyhow::{Result, anyhow}; -use client::Client; -use client::UserStore; use cloud_llm_client::CompletionRequestStatus; use futures::FutureExt; use futures::{StreamExt, future::BoxFuture, stream::BoxStream}; -use gpui::{AnyView, App, AsyncApp, Entity, SharedString, Task, Window}; +use gpui::{AnyView, App, AsyncApp, SharedString, Task, Window}; use http_client::{StatusCode, http}; use icons::IconName; use parking_lot::Mutex; @@ -36,15 +34,10 @@ pub use crate::registry::*; pub use crate::request::*; pub use crate::role::*; pub use crate::tool_schema::LanguageModelToolSchemaFormat; +pub use env_var::{EnvVar, env_var}; pub use provider::*; -pub use zed_env_vars::{EnvVar, env_var}; -pub fn init(user_store: Entity, client: Arc, cx: &mut App) { - init_settings(cx); - RefreshLlmTokenListener::register(client, user_store, cx); -} - -pub fn init_settings(cx: &mut App) { +pub fn init(cx: &mut App) { registry::init(cx); } diff --git a/crates/language_model/src/model/cloud_model.rs b/crates/language_model/src/model/cloud_model.rs index a1362d78292082522f4e883efe42b2ca1e0a0300..db926aab1f70a46a4e70b1b67c2c9e4c4f465c2c 100644 --- a/crates/language_model/src/model/cloud_model.rs +++ b/crates/language_model/src/model/cloud_model.rs @@ -1,16 +1,9 @@ use std::fmt; use std::sync::Arc; -use anyhow::{Context as _, Result}; -use client::Client; -use client::UserStore; use cloud_api_client::ClientApiError; +use 
cloud_api_client::CloudApiClient; use cloud_api_types::OrganizationId; -use cloud_api_types::websocket_protocol::MessageToClient; -use cloud_llm_client::{EXPIRED_LLM_TOKEN_HEADER_NAME, OUTDATED_LLM_TOKEN_HEADER_NAME}; -use gpui::{ - App, AppContext as _, Context, Entity, EventEmitter, Global, ReadGlobal as _, Subscription, -}; use smol::lock::{RwLock, RwLockUpgradableReadGuard, RwLockWriteGuard}; use thiserror::Error; @@ -30,18 +23,12 @@ impl fmt::Display for PaymentRequiredError { pub struct LlmApiToken(Arc>>); impl LlmApiToken { - pub fn global(cx: &App) -> Self { - RefreshLlmTokenListener::global(cx) - .read(cx) - .llm_api_token - .clone() - } - pub async fn acquire( &self, - client: &Arc, + client: &CloudApiClient, + system_id: Option, organization_id: Option, - ) -> Result { + ) -> Result { let lock = self.0.upgradable_read().await; if let Some(token) = lock.as_ref() { Ok(token.to_string()) @@ -49,6 +36,7 @@ impl LlmApiToken { Self::fetch( RwLockUpgradableReadGuard::upgrade(lock).await, client, + system_id, organization_id, ) .await @@ -57,10 +45,11 @@ impl LlmApiToken { pub async fn refresh( &self, - client: &Arc, + client: &CloudApiClient, + system_id: Option, organization_id: Option, - ) -> Result { - Self::fetch(self.0.write().await, client, organization_id).await + ) -> Result { + Self::fetch(self.0.write().await, client, system_id, organization_id).await } /// Clears the existing token before attempting to fetch a new one. @@ -69,28 +58,22 @@ impl LlmApiToken { /// leave a token for the wrong organization. 
pub async fn clear_and_refresh( &self, - client: &Arc, + client: &CloudApiClient, + system_id: Option, organization_id: Option, - ) -> Result { + ) -> Result { let mut lock = self.0.write().await; *lock = None; - Self::fetch(lock, client, organization_id).await + Self::fetch(lock, client, system_id, organization_id).await } async fn fetch( mut lock: RwLockWriteGuard<'_, Option>, - client: &Arc, + client: &CloudApiClient, + system_id: Option, organization_id: Option, - ) -> Result { - let system_id = client - .telemetry() - .system_id() - .map(|system_id| system_id.to_string()); - - let result = client - .cloud_client() - .create_llm_token(system_id, organization_id) - .await; + ) -> Result { + let result = client.create_llm_token(system_id, organization_id).await; match result { Ok(response) => { *lock = Some(response.token.0.clone()); @@ -98,112 +81,7 @@ impl LlmApiToken { } Err(err) => { *lock = None; - match err { - ClientApiError::Unauthorized => { - client.request_sign_out(); - Err(err).context("Failed to create LLM token") - } - ClientApiError::Other(err) => Err(err), - } - } - } - } -} - -pub trait NeedsLlmTokenRefresh { - /// Returns whether the LLM token needs to be refreshed. 
- fn needs_llm_token_refresh(&self) -> bool; -} - -impl NeedsLlmTokenRefresh for http_client::Response { - fn needs_llm_token_refresh(&self) -> bool { - self.headers().get(EXPIRED_LLM_TOKEN_HEADER_NAME).is_some() - || self.headers().get(OUTDATED_LLM_TOKEN_HEADER_NAME).is_some() - } -} - -enum TokenRefreshMode { - Refresh, - ClearAndRefresh, -} - -struct GlobalRefreshLlmTokenListener(Entity); - -impl Global for GlobalRefreshLlmTokenListener {} - -pub struct LlmTokenRefreshedEvent; - -pub struct RefreshLlmTokenListener { - client: Arc, - user_store: Entity, - llm_api_token: LlmApiToken, - _subscription: Subscription, -} - -impl EventEmitter for RefreshLlmTokenListener {} - -impl RefreshLlmTokenListener { - pub fn register(client: Arc, user_store: Entity, cx: &mut App) { - let listener = cx.new(|cx| RefreshLlmTokenListener::new(client, user_store, cx)); - cx.set_global(GlobalRefreshLlmTokenListener(listener)); - } - - pub fn global(cx: &App) -> Entity { - GlobalRefreshLlmTokenListener::global(cx).0.clone() - } - - fn new(client: Arc, user_store: Entity, cx: &mut Context) -> Self { - client.add_message_to_client_handler({ - let this = cx.weak_entity(); - move |message, cx| { - if let Some(this) = this.upgrade() { - Self::handle_refresh_llm_token(this, message, cx); - } - } - }); - - let subscription = cx.subscribe(&user_store, |this, _user_store, event, cx| { - if matches!(event, client::user::Event::OrganizationChanged) { - this.refresh(TokenRefreshMode::ClearAndRefresh, cx); - } - }); - - Self { - client, - user_store, - llm_api_token: LlmApiToken::default(), - _subscription: subscription, - } - } - - fn refresh(&self, mode: TokenRefreshMode, cx: &mut Context) { - let client = self.client.clone(); - let llm_api_token = self.llm_api_token.clone(); - let organization_id = self - .user_store - .read(cx) - .current_organization() - .map(|organization| organization.id.clone()); - cx.spawn(async move |this, cx| { - match mode { - TokenRefreshMode::Refresh => { - 
llm_api_token.refresh(&client, organization_id).await?; - } - TokenRefreshMode::ClearAndRefresh => { - llm_api_token - .clear_and_refresh(&client, organization_id) - .await?; - } - } - this.update(cx, |_this, cx| cx.emit(LlmTokenRefreshedEvent)) - }) - .detach_and_log_err(cx); - } - - fn handle_refresh_llm_token(this: Entity, message: &MessageToClient, cx: &mut App) { - match message { - MessageToClient::UserUpdated => { - this.update(cx, |this, cx| this.refresh(TokenRefreshMode::Refresh, cx)); + Err(err) } } } diff --git a/crates/language_models/src/language_models.rs b/crates/language_models/src/language_models.rs index 4db1db8fa6ce5afb9d77a6685bfc0861d0fb8885..3154db91a43d1381f5b3f122a724be249adeb79b 100644 --- a/crates/language_models/src/language_models.rs +++ b/crates/language_models/src/language_models.rs @@ -3,6 +3,7 @@ use std::sync::Arc; use ::settings::{Settings, SettingsStore}; use client::{Client, UserStore}; use collections::HashSet; +use credentials_provider::CredentialsProvider; use gpui::{App, Context, Entity}; use language_model::{LanguageModelProviderId, LanguageModelRegistry}; use provider::deepseek::DeepSeekLanguageModelProvider; @@ -31,9 +32,16 @@ use crate::provider::x_ai::XAiLanguageModelProvider; pub use crate::settings::*; pub fn init(user_store: Entity, client: Arc, cx: &mut App) { + let credentials_provider = client.credentials_provider(); let registry = LanguageModelRegistry::global(cx); registry.update(cx, |registry, cx| { - register_language_model_providers(registry, user_store, client.clone(), cx); + register_language_model_providers( + registry, + user_store, + client.clone(), + credentials_provider.clone(), + cx, + ); }); // Subscribe to extension store events to track LLM extension installations @@ -104,6 +112,7 @@ pub fn init(user_store: Entity, client: Arc, cx: &mut App) { &HashSet::default(), &openai_compatible_providers, client.clone(), + credentials_provider.clone(), cx, ); }); @@ -124,6 +133,7 @@ pub fn init(user_store: 
Entity, client: Arc, cx: &mut App) { &openai_compatible_providers, &openai_compatible_providers_new, client.clone(), + credentials_provider.clone(), cx, ); }); @@ -138,6 +148,7 @@ fn register_openai_compatible_providers( old: &HashSet>, new: &HashSet>, client: Arc, + credentials_provider: Arc, cx: &mut Context, ) { for provider_id in old { @@ -152,6 +163,7 @@ fn register_openai_compatible_providers( Arc::new(OpenAiCompatibleLanguageModelProvider::new( provider_id.clone(), client.http_client(), + credentials_provider.clone(), cx, )), cx, @@ -164,6 +176,7 @@ fn register_language_model_providers( registry: &mut LanguageModelRegistry, user_store: Entity, client: Arc, + credentials_provider: Arc, cx: &mut Context, ) { registry.register_provider( @@ -177,62 +190,105 @@ fn register_language_model_providers( registry.register_provider( Arc::new(AnthropicLanguageModelProvider::new( client.http_client(), + credentials_provider.clone(), cx, )), cx, ); registry.register_provider( - Arc::new(OpenAiLanguageModelProvider::new(client.http_client(), cx)), + Arc::new(OpenAiLanguageModelProvider::new( + client.http_client(), + credentials_provider.clone(), + cx, + )), cx, ); registry.register_provider( - Arc::new(OllamaLanguageModelProvider::new(client.http_client(), cx)), + Arc::new(OllamaLanguageModelProvider::new( + client.http_client(), + credentials_provider.clone(), + cx, + )), cx, ); registry.register_provider( - Arc::new(LmStudioLanguageModelProvider::new(client.http_client(), cx)), + Arc::new(LmStudioLanguageModelProvider::new( + client.http_client(), + credentials_provider.clone(), + cx, + )), cx, ); registry.register_provider( - Arc::new(DeepSeekLanguageModelProvider::new(client.http_client(), cx)), + Arc::new(DeepSeekLanguageModelProvider::new( + client.http_client(), + credentials_provider.clone(), + cx, + )), cx, ); registry.register_provider( - Arc::new(GoogleLanguageModelProvider::new(client.http_client(), cx)), + Arc::new(GoogleLanguageModelProvider::new( + 
client.http_client(), + credentials_provider.clone(), + cx, + )), cx, ); registry.register_provider( - MistralLanguageModelProvider::global(client.http_client(), cx), + MistralLanguageModelProvider::global( + client.http_client(), + credentials_provider.clone(), + cx, + ), cx, ); registry.register_provider( - Arc::new(BedrockLanguageModelProvider::new(client.http_client(), cx)), + Arc::new(BedrockLanguageModelProvider::new( + client.http_client(), + credentials_provider.clone(), + cx, + )), cx, ); registry.register_provider( Arc::new(OpenRouterLanguageModelProvider::new( client.http_client(), + credentials_provider.clone(), cx, )), cx, ); registry.register_provider( - Arc::new(VercelLanguageModelProvider::new(client.http_client(), cx)), + Arc::new(VercelLanguageModelProvider::new( + client.http_client(), + credentials_provider.clone(), + cx, + )), cx, ); registry.register_provider( Arc::new(VercelAiGatewayLanguageModelProvider::new( client.http_client(), + credentials_provider.clone(), cx, )), cx, ); registry.register_provider( - Arc::new(XAiLanguageModelProvider::new(client.http_client(), cx)), + Arc::new(XAiLanguageModelProvider::new( + client.http_client(), + credentials_provider.clone(), + cx, + )), cx, ); registry.register_provider( - Arc::new(OpenCodeLanguageModelProvider::new(client.http_client(), cx)), + Arc::new(OpenCodeLanguageModelProvider::new( + client.http_client(), + credentials_provider, + cx, + )), cx, ); registry.register_provider(Arc::new(CopilotChatLanguageModelProvider::new(cx)), cx); diff --git a/crates/language_models/src/provider/anthropic.rs b/crates/language_models/src/provider/anthropic.rs index a98a0ce142dfdbaaaddc056ab378455a45147830..c1b8bc1a3bb1b602b67ae5563d8acc3b05a94d47 100644 --- a/crates/language_models/src/provider/anthropic.rs +++ b/crates/language_models/src/provider/anthropic.rs @@ -6,6 +6,7 @@ use anthropic::{ }; use anyhow::Result; use collections::{BTreeMap, HashMap}; +use credentials_provider::CredentialsProvider; use 
futures::{FutureExt, Stream, StreamExt, future::BoxFuture, stream::BoxStream}; use gpui::{AnyView, App, AsyncApp, Context, Entity, Task}; use http_client::HttpClient; @@ -51,6 +52,7 @@ static API_KEY_ENV_VAR: LazyLock = env_var!(API_KEY_ENV_VAR_NAME); pub struct State { api_key_state: ApiKeyState, + credentials_provider: Arc, } impl State { @@ -59,30 +61,51 @@ impl State { } fn set_api_key(&mut self, api_key: Option, cx: &mut Context) -> Task> { + let credentials_provider = self.credentials_provider.clone(); let api_url = AnthropicLanguageModelProvider::api_url(cx); - self.api_key_state - .store(api_url, api_key, |this| &mut this.api_key_state, cx) + self.api_key_state.store( + api_url, + api_key, + |this| &mut this.api_key_state, + credentials_provider, + cx, + ) } fn authenticate(&mut self, cx: &mut Context) -> Task> { + let credentials_provider = self.credentials_provider.clone(); let api_url = AnthropicLanguageModelProvider::api_url(cx); - self.api_key_state - .load_if_needed(api_url, |this| &mut this.api_key_state, cx) + self.api_key_state.load_if_needed( + api_url, + |this| &mut this.api_key_state, + credentials_provider, + cx, + ) } } impl AnthropicLanguageModelProvider { - pub fn new(http_client: Arc, cx: &mut App) -> Self { + pub fn new( + http_client: Arc, + credentials_provider: Arc, + cx: &mut App, + ) -> Self { let state = cx.new(|cx| { cx.observe_global::(|this: &mut State, cx| { + let credentials_provider = this.credentials_provider.clone(); let api_url = Self::api_url(cx); - this.api_key_state - .handle_url_change(api_url, |this| &mut this.api_key_state, cx); + this.api_key_state.handle_url_change( + api_url, + |this| &mut this.api_key_state, + credentials_provider, + cx, + ); cx.notify(); }) .detach(); State { api_key_state: ApiKeyState::new(Self::api_url(cx), (*API_KEY_ENV_VAR).clone()), + credentials_provider, } }); diff --git a/crates/language_models/src/provider/bedrock.rs b/crates/language_models/src/provider/bedrock.rs index 
f53f145dbd387aa948b977d854ba77f1cbe49ded..4320763e2c5c6de7f3fe9238d7a4991565c3bfcd 100644 --- a/crates/language_models/src/provider/bedrock.rs +++ b/crates/language_models/src/provider/bedrock.rs @@ -195,12 +195,13 @@ pub struct State { settings: Option, /// Whether credentials came from environment variables (only relevant for static credentials) credentials_from_env: bool, + credentials_provider: Arc, _subscription: Subscription, } impl State { fn reset_auth(&self, cx: &mut Context) -> Task> { - let credentials_provider = ::global(cx); + let credentials_provider = self.credentials_provider.clone(); cx.spawn(async move |this, cx| { credentials_provider .delete_credentials(AMAZON_AWS_URL, cx) @@ -220,7 +221,7 @@ impl State { cx: &mut Context, ) -> Task> { let auth = credentials.clone().into_auth(); - let credentials_provider = ::global(cx); + let credentials_provider = self.credentials_provider.clone(); cx.spawn(async move |this, cx| { credentials_provider .write_credentials( @@ -287,7 +288,7 @@ impl State { &self, cx: &mut Context, ) -> Task> { - let credentials_provider = ::global(cx); + let credentials_provider = self.credentials_provider.clone(); cx.spawn(async move |this, cx| { // Try environment variables first let (auth, from_env) = if let Some(bearer_token) = &ZED_BEDROCK_BEARER_TOKEN_VAR.value { @@ -400,11 +401,16 @@ pub struct BedrockLanguageModelProvider { } impl BedrockLanguageModelProvider { - pub fn new(http_client: Arc, cx: &mut App) -> Self { + pub fn new( + http_client: Arc, + credentials_provider: Arc, + cx: &mut App, + ) -> Self { let state = cx.new(|cx| State { auth: None, settings: Some(AllLanguageModelSettings::get_global(cx).bedrock.clone()), credentials_from_env: false, + credentials_provider, _subscription: cx.observe_global::(|_, cx| { cx.notify(); }), diff --git a/crates/language_models/src/provider/cloud.rs b/crates/language_models/src/provider/cloud.rs index 
f9372a4d7ea9c078c58f633cc58bd5597ef49212..29623cc998ad0fe933e9a29c45c651f7be010b07 100644 --- a/crates/language_models/src/provider/cloud.rs +++ b/crates/language_models/src/provider/cloud.rs @@ -1,7 +1,9 @@ use ai_onboarding::YoungAccountBanner; use anthropic::AnthropicModelMode; use anyhow::{Context as _, Result, anyhow}; -use client::{Client, UserStore, zed_urls}; +use client::{ + Client, NeedsLlmTokenRefresh, RefreshLlmTokenListener, UserStore, global_llm_token, zed_urls, +}; use cloud_api_types::{OrganizationId, Plan}; use cloud_llm_client::{ CLIENT_SUPPORTS_STATUS_MESSAGES_HEADER_NAME, CLIENT_SUPPORTS_STATUS_STREAM_ENDED_HEADER_NAME, @@ -24,10 +26,9 @@ use language_model::{ LanguageModelCompletionError, LanguageModelCompletionEvent, LanguageModelEffortLevel, LanguageModelId, LanguageModelName, LanguageModelProvider, LanguageModelProviderId, LanguageModelProviderName, LanguageModelProviderState, LanguageModelRequest, - LanguageModelToolChoice, LanguageModelToolSchemaFormat, LlmApiToken, NeedsLlmTokenRefresh, - OPEN_AI_PROVIDER_ID, OPEN_AI_PROVIDER_NAME, PaymentRequiredError, RateLimiter, - RefreshLlmTokenListener, X_AI_PROVIDER_ID, X_AI_PROVIDER_NAME, ZED_CLOUD_PROVIDER_ID, - ZED_CLOUD_PROVIDER_NAME, + LanguageModelToolChoice, LanguageModelToolSchemaFormat, LlmApiToken, OPEN_AI_PROVIDER_ID, + OPEN_AI_PROVIDER_NAME, PaymentRequiredError, RateLimiter, X_AI_PROVIDER_ID, X_AI_PROVIDER_NAME, + ZED_CLOUD_PROVIDER_ID, ZED_CLOUD_PROVIDER_NAME, }; use release_channel::AppVersion; use schemars::JsonSchema; @@ -111,7 +112,7 @@ impl State { cx: &mut Context, ) -> Self { let refresh_llm_token_listener = RefreshLlmTokenListener::global(cx); - let llm_api_token = LlmApiToken::global(cx); + let llm_api_token = global_llm_token(cx); Self { client: client.clone(), llm_api_token, @@ -226,7 +227,9 @@ impl State { organization_id: Option, ) -> Result { let http_client = &client.http_client(); - let token = llm_api_token.acquire(&client, organization_id).await?; + let token = 
client + .acquire_llm_token(&llm_api_token, organization_id) + .await?; let request = http_client::Request::builder() .method(Method::GET) @@ -414,8 +417,8 @@ impl CloudLanguageModel { ) -> Result { let http_client = &client.http_client(); - let mut token = llm_api_token - .acquire(&client, organization_id.clone()) + let mut token = client + .acquire_llm_token(&llm_api_token, organization_id.clone()) .await?; let mut refreshed_token = false; @@ -447,8 +450,8 @@ impl CloudLanguageModel { } if !refreshed_token && response.needs_llm_token_refresh() { - token = llm_api_token - .refresh(&client, organization_id.clone()) + token = client + .refresh_llm_token(&llm_api_token, organization_id.clone()) .await?; refreshed_token = true; continue; @@ -713,7 +716,9 @@ impl LanguageModel for CloudLanguageModel { into_google(request, model_id.clone(), GoogleModelMode::Default); async move { let http_client = &client.http_client(); - let token = llm_api_token.acquire(&client, organization_id).await?; + let token = client + .acquire_llm_token(&llm_api_token, organization_id) + .await?; let request_body = CountTokensBody { provider: cloud_llm_client::LanguageModelProvider::Google, diff --git a/crates/language_models/src/provider/deepseek.rs b/crates/language_models/src/provider/deepseek.rs index bd2469d865fd8421d6ad31208e6a4be413c0fe14..0cfb1af425c7cb0279d98fa124a589437f1bb1a1 100644 --- a/crates/language_models/src/provider/deepseek.rs +++ b/crates/language_models/src/provider/deepseek.rs @@ -1,5 +1,6 @@ use anyhow::{Result, anyhow}; use collections::{BTreeMap, HashMap}; +use credentials_provider::CredentialsProvider; use deepseek::DEEPSEEK_API_URL; use futures::Stream; @@ -49,6 +50,7 @@ pub struct DeepSeekLanguageModelProvider { pub struct State { api_key_state: ApiKeyState, + credentials_provider: Arc, } impl State { @@ -57,30 +59,51 @@ impl State { } fn set_api_key(&mut self, api_key: Option, cx: &mut Context) -> Task> { + let credentials_provider = 
self.credentials_provider.clone(); let api_url = DeepSeekLanguageModelProvider::api_url(cx); - self.api_key_state - .store(api_url, api_key, |this| &mut this.api_key_state, cx) + self.api_key_state.store( + api_url, + api_key, + |this| &mut this.api_key_state, + credentials_provider, + cx, + ) } fn authenticate(&mut self, cx: &mut Context) -> Task> { + let credentials_provider = self.credentials_provider.clone(); let api_url = DeepSeekLanguageModelProvider::api_url(cx); - self.api_key_state - .load_if_needed(api_url, |this| &mut this.api_key_state, cx) + self.api_key_state.load_if_needed( + api_url, + |this| &mut this.api_key_state, + credentials_provider, + cx, + ) } } impl DeepSeekLanguageModelProvider { - pub fn new(http_client: Arc, cx: &mut App) -> Self { + pub fn new( + http_client: Arc, + credentials_provider: Arc, + cx: &mut App, + ) -> Self { let state = cx.new(|cx| { cx.observe_global::(|this: &mut State, cx| { + let credentials_provider = this.credentials_provider.clone(); let api_url = Self::api_url(cx); - this.api_key_state - .handle_url_change(api_url, |this| &mut this.api_key_state, cx); + this.api_key_state.handle_url_change( + api_url, + |this| &mut this.api_key_state, + credentials_provider, + cx, + ); cx.notify(); }) .detach(); State { api_key_state: ApiKeyState::new(Self::api_url(cx), (*API_KEY_ENV_VAR).clone()), + credentials_provider, } }); diff --git a/crates/language_models/src/provider/google.rs b/crates/language_models/src/provider/google.rs index 8fdfb514ac6e872bd24968d33f2c1169401d5a9c..244f7835a85ff67f0c4826321910ea13516371cb 100644 --- a/crates/language_models/src/provider/google.rs +++ b/crates/language_models/src/provider/google.rs @@ -1,5 +1,6 @@ use anyhow::{Context as _, Result}; use collections::BTreeMap; +use credentials_provider::CredentialsProvider; use futures::{FutureExt, Stream, StreamExt, future::BoxFuture}; use google_ai::{ FunctionDeclaration, GenerateContentResponse, GoogleModelMode, Part, SystemInstruction, @@ -60,6 
+61,7 @@ pub struct GoogleLanguageModelProvider { pub struct State { api_key_state: ApiKeyState, + credentials_provider: Arc, } const GEMINI_API_KEY_VAR_NAME: &str = "GEMINI_API_KEY"; @@ -76,30 +78,51 @@ impl State { } fn set_api_key(&mut self, api_key: Option, cx: &mut Context) -> Task> { + let credentials_provider = self.credentials_provider.clone(); let api_url = GoogleLanguageModelProvider::api_url(cx); - self.api_key_state - .store(api_url, api_key, |this| &mut this.api_key_state, cx) + self.api_key_state.store( + api_url, + api_key, + |this| &mut this.api_key_state, + credentials_provider, + cx, + ) } fn authenticate(&mut self, cx: &mut Context) -> Task> { + let credentials_provider = self.credentials_provider.clone(); let api_url = GoogleLanguageModelProvider::api_url(cx); - self.api_key_state - .load_if_needed(api_url, |this| &mut this.api_key_state, cx) + self.api_key_state.load_if_needed( + api_url, + |this| &mut this.api_key_state, + credentials_provider, + cx, + ) } } impl GoogleLanguageModelProvider { - pub fn new(http_client: Arc, cx: &mut App) -> Self { + pub fn new( + http_client: Arc, + credentials_provider: Arc, + cx: &mut App, + ) -> Self { let state = cx.new(|cx| { cx.observe_global::(|this: &mut State, cx| { + let credentials_provider = this.credentials_provider.clone(); let api_url = Self::api_url(cx); - this.api_key_state - .handle_url_change(api_url, |this| &mut this.api_key_state, cx); + this.api_key_state.handle_url_change( + api_url, + |this| &mut this.api_key_state, + credentials_provider, + cx, + ); cx.notify(); }) .detach(); State { api_key_state: ApiKeyState::new(Self::api_url(cx), (*API_KEY_ENV_VAR).clone()), + credentials_provider, } }); diff --git a/crates/language_models/src/provider/lmstudio.rs b/crates/language_models/src/provider/lmstudio.rs index 6c8d3c6e1c50185a4b09e9afc80c688f4c8d1381..0d60fef16791087e35bac7d846b2ec99821d5470 100644 --- a/crates/language_models/src/provider/lmstudio.rs +++ 
b/crates/language_models/src/provider/lmstudio.rs @@ -1,5 +1,6 @@ use anyhow::{Result, anyhow}; use collections::HashMap; +use credentials_provider::CredentialsProvider; use fs::Fs; use futures::Stream; use futures::{FutureExt, StreamExt, future::BoxFuture, stream::BoxStream}; @@ -52,6 +53,7 @@ pub struct LmStudioLanguageModelProvider { pub struct State { api_key_state: ApiKeyState, + credentials_provider: Arc, http_client: Arc, available_models: Vec, fetch_model_task: Option>>, @@ -64,10 +66,15 @@ impl State { } fn set_api_key(&mut self, api_key: Option, cx: &mut Context) -> Task> { + let credentials_provider = self.credentials_provider.clone(); let api_url = LmStudioLanguageModelProvider::api_url(cx).into(); - let task = self - .api_key_state - .store(api_url, api_key, |this| &mut this.api_key_state, cx); + let task = self.api_key_state.store( + api_url, + api_key, + |this| &mut this.api_key_state, + credentials_provider, + cx, + ); self.restart_fetch_models_task(cx); task } @@ -114,10 +121,14 @@ impl State { } fn authenticate(&mut self, cx: &mut Context) -> Task> { + let credentials_provider = self.credentials_provider.clone(); let api_url = LmStudioLanguageModelProvider::api_url(cx).into(); - let _task = self - .api_key_state - .load_if_needed(api_url, |this| &mut this.api_key_state, cx); + let _task = self.api_key_state.load_if_needed( + api_url, + |this| &mut this.api_key_state, + credentials_provider, + cx, + ); if self.is_authenticated() { return Task::ready(Ok(())); @@ -152,16 +163,29 @@ impl State { } impl LmStudioLanguageModelProvider { - pub fn new(http_client: Arc, cx: &mut App) -> Self { + pub fn new( + http_client: Arc, + credentials_provider: Arc, + cx: &mut App, + ) -> Self { let this = Self { http_client: http_client.clone(), state: cx.new(|cx| { let subscription = cx.observe_global::({ let mut settings = AllLanguageModelSettings::get_global(cx).lmstudio.clone(); move |this: &mut State, cx| { - let new_settings = 
&AllLanguageModelSettings::get_global(cx).lmstudio; - if &settings != new_settings { - settings = new_settings.clone(); + let new_settings = + AllLanguageModelSettings::get_global(cx).lmstudio.clone(); + if settings != new_settings { + let credentials_provider = this.credentials_provider.clone(); + let api_url = Self::api_url(cx).into(); + this.api_key_state.handle_url_change( + api_url, + |this| &mut this.api_key_state, + credentials_provider, + cx, + ); + settings = new_settings; this.restart_fetch_models_task(cx); cx.notify(); } @@ -173,6 +197,7 @@ impl LmStudioLanguageModelProvider { Self::api_url(cx).into(), (*API_KEY_ENV_VAR).clone(), ), + credentials_provider, http_client, available_models: Default::default(), fetch_model_task: None, diff --git a/crates/language_models/src/provider/mistral.rs b/crates/language_models/src/provider/mistral.rs index 72f0cae2993da4efb3e19cb19ec42b186290920d..4cd1375fe50cd792a3a7bc8c85ba7b5b5af9520a 100644 --- a/crates/language_models/src/provider/mistral.rs +++ b/crates/language_models/src/provider/mistral.rs @@ -1,5 +1,6 @@ use anyhow::{Result, anyhow}; use collections::BTreeMap; +use credentials_provider::CredentialsProvider; use futures::{FutureExt, Stream, StreamExt, future::BoxFuture, stream::BoxStream}; use gpui::{AnyView, App, AsyncApp, Context, Entity, Global, SharedString, Task, Window}; @@ -43,6 +44,7 @@ pub struct MistralLanguageModelProvider { pub struct State { api_key_state: ApiKeyState, + credentials_provider: Arc, } impl State { @@ -51,15 +53,26 @@ impl State { } fn set_api_key(&mut self, api_key: Option, cx: &mut Context) -> Task> { + let credentials_provider = self.credentials_provider.clone(); let api_url = MistralLanguageModelProvider::api_url(cx); - self.api_key_state - .store(api_url, api_key, |this| &mut this.api_key_state, cx) + self.api_key_state.store( + api_url, + api_key, + |this| &mut this.api_key_state, + credentials_provider, + cx, + ) } fn authenticate(&mut self, cx: &mut Context) -> Task> { + let 
credentials_provider = self.credentials_provider.clone(); let api_url = MistralLanguageModelProvider::api_url(cx); - self.api_key_state - .load_if_needed(api_url, |this| &mut this.api_key_state, cx) + self.api_key_state.load_if_needed( + api_url, + |this| &mut this.api_key_state, + credentials_provider, + cx, + ) } } @@ -73,20 +86,30 @@ impl MistralLanguageModelProvider { .map(|this| &this.0) } - pub fn global(http_client: Arc, cx: &mut App) -> Arc { + pub fn global( + http_client: Arc, + credentials_provider: Arc, + cx: &mut App, + ) -> Arc { if let Some(this) = cx.try_global::() { return this.0.clone(); } let state = cx.new(|cx| { cx.observe_global::(|this: &mut State, cx| { + let credentials_provider = this.credentials_provider.clone(); let api_url = Self::api_url(cx); - this.api_key_state - .handle_url_change(api_url, |this| &mut this.api_key_state, cx); + this.api_key_state.handle_url_change( + api_url, + |this| &mut this.api_key_state, + credentials_provider, + cx, + ); cx.notify(); }) .detach(); State { api_key_state: ApiKeyState::new(Self::api_url(cx), (*API_KEY_ENV_VAR).clone()), + credentials_provider, } }); diff --git a/crates/language_models/src/provider/ollama.rs b/crates/language_models/src/provider/ollama.rs index 551fcd55358c11bdf64bf2f27b32fa9a7f702252..49c326683a225bf73f604a584307ea1316a710c4 100644 --- a/crates/language_models/src/provider/ollama.rs +++ b/crates/language_models/src/provider/ollama.rs @@ -1,4 +1,5 @@ use anyhow::{Result, anyhow}; +use credentials_provider::CredentialsProvider; use fs::Fs; use futures::{FutureExt, StreamExt, future::BoxFuture, stream::BoxStream}; use futures::{Stream, TryFutureExt, stream}; @@ -54,6 +55,7 @@ pub struct OllamaLanguageModelProvider { pub struct State { api_key_state: ApiKeyState, + credentials_provider: Arc, http_client: Arc, fetched_models: Vec, fetch_model_task: Option>>, @@ -65,10 +67,15 @@ impl State { } fn set_api_key(&mut self, api_key: Option, cx: &mut Context) -> Task> { + let 
credentials_provider = self.credentials_provider.clone(); let api_url = OllamaLanguageModelProvider::api_url(cx); - let task = self - .api_key_state - .store(api_url, api_key, |this| &mut this.api_key_state, cx); + let task = self.api_key_state.store( + api_url, + api_key, + |this| &mut this.api_key_state, + credentials_provider, + cx, + ); self.fetched_models.clear(); cx.spawn(async move |this, cx| { @@ -80,10 +87,14 @@ impl State { } fn authenticate(&mut self, cx: &mut Context) -> Task> { + let credentials_provider = self.credentials_provider.clone(); let api_url = OllamaLanguageModelProvider::api_url(cx); - let task = self - .api_key_state - .load_if_needed(api_url, |this| &mut this.api_key_state, cx); + let task = self.api_key_state.load_if_needed( + api_url, + |this| &mut this.api_key_state, + credentials_provider, + cx, + ); // Always try to fetch models - if no API key is needed (local Ollama), it will work // If API key is needed and provided, it will work @@ -157,7 +168,11 @@ impl State { } impl OllamaLanguageModelProvider { - pub fn new(http_client: Arc, cx: &mut App) -> Self { + pub fn new( + http_client: Arc, + credentials_provider: Arc, + cx: &mut App, + ) -> Self { let this = Self { http_client: http_client.clone(), state: cx.new(|cx| { @@ -170,6 +185,14 @@ impl OllamaLanguageModelProvider { let url_changed = last_settings.api_url != current_settings.api_url; last_settings = current_settings.clone(); if url_changed { + let credentials_provider = this.credentials_provider.clone(); + let api_url = Self::api_url(cx); + this.api_key_state.handle_url_change( + api_url, + |this| &mut this.api_key_state, + credentials_provider, + cx, + ); this.fetched_models.clear(); this.authenticate(cx).detach(); } @@ -184,6 +207,7 @@ impl OllamaLanguageModelProvider { fetched_models: Default::default(), fetch_model_task: None, api_key_state: ApiKeyState::new(Self::api_url(cx), (*API_KEY_ENV_VAR).clone()), + credentials_provider, } }), }; diff --git 
a/crates/language_models/src/provider/open_ai.rs b/crates/language_models/src/provider/open_ai.rs index 9289c66b2a4c9213826d2d027555511c9746d00e..6a2313487f4a1922cdc2aa20d23ede01c4b7d158 100644 --- a/crates/language_models/src/provider/open_ai.rs +++ b/crates/language_models/src/provider/open_ai.rs @@ -1,5 +1,6 @@ use anyhow::{Result, anyhow}; use collections::{BTreeMap, HashMap}; +use credentials_provider::CredentialsProvider; use futures::Stream; use futures::{FutureExt, StreamExt, future::BoxFuture}; use gpui::{AnyView, App, AsyncApp, Context, Entity, SharedString, Task, Window}; @@ -55,6 +56,7 @@ pub struct OpenAiLanguageModelProvider { pub struct State { api_key_state: ApiKeyState, + credentials_provider: Arc, } impl State { @@ -63,30 +65,51 @@ impl State { } fn set_api_key(&mut self, api_key: Option, cx: &mut Context) -> Task> { + let credentials_provider = self.credentials_provider.clone(); let api_url = OpenAiLanguageModelProvider::api_url(cx); - self.api_key_state - .store(api_url, api_key, |this| &mut this.api_key_state, cx) + self.api_key_state.store( + api_url, + api_key, + |this| &mut this.api_key_state, + credentials_provider, + cx, + ) } fn authenticate(&mut self, cx: &mut Context) -> Task> { + let credentials_provider = self.credentials_provider.clone(); let api_url = OpenAiLanguageModelProvider::api_url(cx); - self.api_key_state - .load_if_needed(api_url, |this| &mut this.api_key_state, cx) + self.api_key_state.load_if_needed( + api_url, + |this| &mut this.api_key_state, + credentials_provider, + cx, + ) } } impl OpenAiLanguageModelProvider { - pub fn new(http_client: Arc, cx: &mut App) -> Self { + pub fn new( + http_client: Arc, + credentials_provider: Arc, + cx: &mut App, + ) -> Self { let state = cx.new(|cx| { cx.observe_global::(|this: &mut State, cx| { + let credentials_provider = this.credentials_provider.clone(); let api_url = Self::api_url(cx); - this.api_key_state - .handle_url_change(api_url, |this| &mut this.api_key_state, cx); + 
this.api_key_state.handle_url_change( + api_url, + |this| &mut this.api_key_state, + credentials_provider, + cx, + ); cx.notify(); }) .detach(); State { api_key_state: ApiKeyState::new(Self::api_url(cx), (*API_KEY_ENV_VAR).clone()), + credentials_provider, } }); diff --git a/crates/language_models/src/provider/open_ai_compatible.rs b/crates/language_models/src/provider/open_ai_compatible.rs index 87a08097782198238a5d2467af32cc66b3183664..1c3268749c3340826cd2f50d29e80eecfa1826d4 100644 --- a/crates/language_models/src/provider/open_ai_compatible.rs +++ b/crates/language_models/src/provider/open_ai_compatible.rs @@ -1,5 +1,6 @@ use anyhow::Result; use convert_case::{Case, Casing}; +use credentials_provider::CredentialsProvider; use futures::{FutureExt, StreamExt, future::BoxFuture}; use gpui::{AnyView, App, AsyncApp, Context, Entity, SharedString, Task, Window}; use http_client::HttpClient; @@ -44,6 +45,7 @@ pub struct State { id: Arc, api_key_state: ApiKeyState, settings: OpenAiCompatibleSettings, + credentials_provider: Arc, } impl State { @@ -52,20 +54,36 @@ impl State { } fn set_api_key(&mut self, api_key: Option, cx: &mut Context) -> Task> { + let credentials_provider = self.credentials_provider.clone(); let api_url = SharedString::new(self.settings.api_url.as_str()); - self.api_key_state - .store(api_url, api_key, |this| &mut this.api_key_state, cx) + self.api_key_state.store( + api_url, + api_key, + |this| &mut this.api_key_state, + credentials_provider, + cx, + ) } fn authenticate(&mut self, cx: &mut Context) -> Task> { + let credentials_provider = self.credentials_provider.clone(); let api_url = SharedString::new(self.settings.api_url.clone()); - self.api_key_state - .load_if_needed(api_url, |this| &mut this.api_key_state, cx) + self.api_key_state.load_if_needed( + api_url, + |this| &mut this.api_key_state, + credentials_provider, + cx, + ) } } impl OpenAiCompatibleLanguageModelProvider { - pub fn new(id: Arc, http_client: Arc, cx: &mut App) -> Self { + pub 
fn new( + id: Arc, + http_client: Arc, + credentials_provider: Arc, + cx: &mut App, + ) -> Self { fn resolve_settings<'a>(id: &'a str, cx: &'a App) -> Option<&'a OpenAiCompatibleSettings> { crate::AllLanguageModelSettings::get_global(cx) .openai_compatible @@ -79,10 +97,12 @@ impl OpenAiCompatibleLanguageModelProvider { return; }; if &this.settings != &settings { + let credentials_provider = this.credentials_provider.clone(); let api_url = SharedString::new(settings.api_url.as_str()); this.api_key_state.handle_url_change( api_url, |this| &mut this.api_key_state, + credentials_provider, cx, ); this.settings = settings; @@ -98,6 +118,7 @@ impl OpenAiCompatibleLanguageModelProvider { EnvVar::new(api_key_env_var_name), ), settings, + credentials_provider, } }); @@ -381,7 +402,7 @@ impl LanguageModel for OpenAiCompatibleLanguageModel { self.model.capabilities.parallel_tool_calls, self.model.capabilities.prompt_cache_key, self.max_output_tokens(), - None, + self.model.reasoning_effort.clone(), ); let completions = self.stream_completion(request, cx); async move { @@ -396,7 +417,7 @@ impl LanguageModel for OpenAiCompatibleLanguageModel { self.model.capabilities.parallel_tool_calls, self.model.capabilities.prompt_cache_key, self.max_output_tokens(), - None, + self.model.reasoning_effort.clone(), ); let completions = self.stream_response(request, cx); async move { diff --git a/crates/language_models/src/provider/open_router.rs b/crates/language_models/src/provider/open_router.rs index a4a679be73c0276351a6524ad7e8fc40e2c26860..09c8eb768d12c61ed1dc86a1251ad52114be6162 100644 --- a/crates/language_models/src/provider/open_router.rs +++ b/crates/language_models/src/provider/open_router.rs @@ -1,5 +1,6 @@ use anyhow::Result; use collections::HashMap; +use credentials_provider::CredentialsProvider; use futures::{FutureExt, Stream, StreamExt, future::BoxFuture}; use gpui::{AnyView, App, AsyncApp, Context, Entity, SharedString, Task}; use http_client::HttpClient; @@ -42,6 +43,7 @@ 
pub struct OpenRouterLanguageModelProvider { pub struct State { api_key_state: ApiKeyState, + credentials_provider: Arc, http_client: Arc, available_models: Vec, fetch_models_task: Option>>, @@ -53,16 +55,26 @@ impl State { } fn set_api_key(&mut self, api_key: Option, cx: &mut Context) -> Task> { + let credentials_provider = self.credentials_provider.clone(); let api_url = OpenRouterLanguageModelProvider::api_url(cx); - self.api_key_state - .store(api_url, api_key, |this| &mut this.api_key_state, cx) + self.api_key_state.store( + api_url, + api_key, + |this| &mut this.api_key_state, + credentials_provider, + cx, + ) } fn authenticate(&mut self, cx: &mut Context) -> Task> { + let credentials_provider = self.credentials_provider.clone(); let api_url = OpenRouterLanguageModelProvider::api_url(cx); - let task = self - .api_key_state - .load_if_needed(api_url, |this| &mut this.api_key_state, cx); + let task = self.api_key_state.load_if_needed( + api_url, + |this| &mut this.api_key_state, + credentials_provider, + cx, + ); cx.spawn(async move |this, cx| { let result = task.await; @@ -114,7 +126,11 @@ impl State { } impl OpenRouterLanguageModelProvider { - pub fn new(http_client: Arc, cx: &mut App) -> Self { + pub fn new( + http_client: Arc, + credentials_provider: Arc, + cx: &mut App, + ) -> Self { let state = cx.new(|cx| { cx.observe_global::({ let mut last_settings = OpenRouterLanguageModelProvider::settings(cx).clone(); @@ -131,6 +147,7 @@ impl OpenRouterLanguageModelProvider { .detach(); State { api_key_state: ApiKeyState::new(Self::api_url(cx), (*API_KEY_ENV_VAR).clone()), + credentials_provider, http_client: http_client.clone(), available_models: Vec::new(), fetch_models_task: None, diff --git a/crates/language_models/src/provider/opencode.rs b/crates/language_models/src/provider/opencode.rs index f3953f3cafa4a1f59ff86004628c0a4022f6257e..aae3a552544ebf2cc59255da954d84cf7b78c7da 100644 --- a/crates/language_models/src/provider/opencode.rs +++ 
b/crates/language_models/src/provider/opencode.rs @@ -1,5 +1,6 @@ use anyhow::Result; use collections::BTreeMap; +use credentials_provider::CredentialsProvider; use futures::{FutureExt, StreamExt, future::BoxFuture}; use gpui::{AnyView, App, AsyncApp, Context, Entity, SharedString, Task, Window}; use http_client::HttpClient; @@ -43,6 +44,7 @@ pub struct OpenCodeLanguageModelProvider { pub struct State { api_key_state: ApiKeyState, + credentials_provider: Arc, } impl State { @@ -51,30 +53,51 @@ impl State { } fn set_api_key(&mut self, api_key: Option, cx: &mut Context) -> Task> { + let credentials_provider = self.credentials_provider.clone(); let api_url = OpenCodeLanguageModelProvider::api_url(cx); - self.api_key_state - .store(api_url, api_key, |this| &mut this.api_key_state, cx) + self.api_key_state.store( + api_url, + api_key, + |this| &mut this.api_key_state, + credentials_provider, + cx, + ) } fn authenticate(&mut self, cx: &mut Context) -> Task> { + let credentials_provider = self.credentials_provider.clone(); let api_url = OpenCodeLanguageModelProvider::api_url(cx); - self.api_key_state - .load_if_needed(api_url, |this| &mut this.api_key_state, cx) + self.api_key_state.load_if_needed( + api_url, + |this| &mut this.api_key_state, + credentials_provider, + cx, + ) } } impl OpenCodeLanguageModelProvider { - pub fn new(http_client: Arc, cx: &mut App) -> Self { + pub fn new( + http_client: Arc, + credentials_provider: Arc, + cx: &mut App, + ) -> Self { let state = cx.new(|cx| { cx.observe_global::(|this: &mut State, cx| { + let credentials_provider = this.credentials_provider.clone(); let api_url = Self::api_url(cx); - this.api_key_state - .handle_url_change(api_url, |this| &mut this.api_key_state, cx); + this.api_key_state.handle_url_change( + api_url, + |this| &mut this.api_key_state, + credentials_provider, + cx, + ); cx.notify(); }) .detach(); State { api_key_state: ApiKeyState::new(Self::api_url(cx), (*API_KEY_ENV_VAR).clone()), + credentials_provider, } }); 
diff --git a/crates/language_models/src/provider/vercel.rs b/crates/language_models/src/provider/vercel.rs index b71da5b7db05710ee30115ab54379c9ee4e4c750..cedbc9c3cb988375b90864ceb23a3b14fc50abdd 100644 --- a/crates/language_models/src/provider/vercel.rs +++ b/crates/language_models/src/provider/vercel.rs @@ -1,5 +1,6 @@ use anyhow::Result; use collections::BTreeMap; +use credentials_provider::CredentialsProvider; use futures::{FutureExt, StreamExt, future::BoxFuture}; use gpui::{AnyView, App, AsyncApp, Context, Entity, SharedString, Task, Window}; use http_client::HttpClient; @@ -38,6 +39,7 @@ pub struct VercelLanguageModelProvider { pub struct State { api_key_state: ApiKeyState, + credentials_provider: Arc, } impl State { @@ -46,30 +48,51 @@ impl State { } fn set_api_key(&mut self, api_key: Option, cx: &mut Context) -> Task> { + let credentials_provider = self.credentials_provider.clone(); let api_url = VercelLanguageModelProvider::api_url(cx); - self.api_key_state - .store(api_url, api_key, |this| &mut this.api_key_state, cx) + self.api_key_state.store( + api_url, + api_key, + |this| &mut this.api_key_state, + credentials_provider, + cx, + ) } fn authenticate(&mut self, cx: &mut Context) -> Task> { + let credentials_provider = self.credentials_provider.clone(); let api_url = VercelLanguageModelProvider::api_url(cx); - self.api_key_state - .load_if_needed(api_url, |this| &mut this.api_key_state, cx) + self.api_key_state.load_if_needed( + api_url, + |this| &mut this.api_key_state, + credentials_provider, + cx, + ) } } impl VercelLanguageModelProvider { - pub fn new(http_client: Arc, cx: &mut App) -> Self { + pub fn new( + http_client: Arc, + credentials_provider: Arc, + cx: &mut App, + ) -> Self { let state = cx.new(|cx| { cx.observe_global::(|this: &mut State, cx| { + let credentials_provider = this.credentials_provider.clone(); let api_url = Self::api_url(cx); - this.api_key_state - .handle_url_change(api_url, |this| &mut this.api_key_state, cx); + 
this.api_key_state.handle_url_change( + api_url, + |this| &mut this.api_key_state, + credentials_provider, + cx, + ); cx.notify(); }) .detach(); State { api_key_state: ApiKeyState::new(Self::api_url(cx), (*API_KEY_ENV_VAR).clone()), + credentials_provider, } }); diff --git a/crates/language_models/src/provider/vercel_ai_gateway.rs b/crates/language_models/src/provider/vercel_ai_gateway.rs index 78f900de0c94fd3bbbff3962e92d1a8cb9f3e118..66767edd809531b4b020263654922d742a1a04be 100644 --- a/crates/language_models/src/provider/vercel_ai_gateway.rs +++ b/crates/language_models/src/provider/vercel_ai_gateway.rs @@ -1,5 +1,6 @@ use anyhow::Result; use collections::BTreeMap; +use credentials_provider::CredentialsProvider; use futures::{AsyncReadExt, FutureExt, StreamExt, future::BoxFuture}; use gpui::{AnyView, App, AsyncApp, Context, Entity, SharedString, Task, Window}; use http_client::{AsyncBody, HttpClient, Method, Request as HttpRequest, http}; @@ -41,6 +42,7 @@ pub struct VercelAiGatewayLanguageModelProvider { pub struct State { api_key_state: ApiKeyState, + credentials_provider: Arc, http_client: Arc, available_models: Vec, fetch_models_task: Option>>, @@ -52,16 +54,26 @@ impl State { } fn set_api_key(&mut self, api_key: Option, cx: &mut Context) -> Task> { + let credentials_provider = self.credentials_provider.clone(); let api_url = VercelAiGatewayLanguageModelProvider::api_url(cx); - self.api_key_state - .store(api_url, api_key, |this| &mut this.api_key_state, cx) + self.api_key_state.store( + api_url, + api_key, + |this| &mut this.api_key_state, + credentials_provider, + cx, + ) } fn authenticate(&mut self, cx: &mut Context) -> Task> { + let credentials_provider = self.credentials_provider.clone(); let api_url = VercelAiGatewayLanguageModelProvider::api_url(cx); - let task = self - .api_key_state - .load_if_needed(api_url, |this| &mut this.api_key_state, cx); + let task = self.api_key_state.load_if_needed( + api_url, + |this| &mut this.api_key_state, + 
credentials_provider, + cx, + ); cx.spawn(async move |this, cx| { let result = task.await; @@ -100,7 +112,11 @@ impl State { } impl VercelAiGatewayLanguageModelProvider { - pub fn new(http_client: Arc, cx: &mut App) -> Self { + pub fn new( + http_client: Arc, + credentials_provider: Arc, + cx: &mut App, + ) -> Self { let state = cx.new(|cx| { cx.observe_global::({ let mut last_settings = VercelAiGatewayLanguageModelProvider::settings(cx).clone(); @@ -116,6 +132,7 @@ impl VercelAiGatewayLanguageModelProvider { .detach(); State { api_key_state: ApiKeyState::new(Self::api_url(cx), (*API_KEY_ENV_VAR).clone()), + credentials_provider, http_client: http_client.clone(), available_models: Vec::new(), fetch_models_task: None, diff --git a/crates/language_models/src/provider/x_ai.rs b/crates/language_models/src/provider/x_ai.rs index c00637bce7e67b624f5cdcae9aebe43fb43971f8..88189864c7b4b650a24afb2b872c1d6105cf9782 100644 --- a/crates/language_models/src/provider/x_ai.rs +++ b/crates/language_models/src/provider/x_ai.rs @@ -1,5 +1,6 @@ use anyhow::Result; use collections::BTreeMap; +use credentials_provider::CredentialsProvider; use futures::{FutureExt, StreamExt, future::BoxFuture}; use gpui::{AnyView, App, AsyncApp, Context, Entity, Task, Window}; use http_client::HttpClient; @@ -39,6 +40,7 @@ pub struct XAiLanguageModelProvider { pub struct State { api_key_state: ApiKeyState, + credentials_provider: Arc, } impl State { @@ -47,30 +49,51 @@ impl State { } fn set_api_key(&mut self, api_key: Option, cx: &mut Context) -> Task> { + let credentials_provider = self.credentials_provider.clone(); let api_url = XAiLanguageModelProvider::api_url(cx); - self.api_key_state - .store(api_url, api_key, |this| &mut this.api_key_state, cx) + self.api_key_state.store( + api_url, + api_key, + |this| &mut this.api_key_state, + credentials_provider, + cx, + ) } fn authenticate(&mut self, cx: &mut Context) -> Task> { + let credentials_provider = self.credentials_provider.clone(); let api_url = 
XAiLanguageModelProvider::api_url(cx); - self.api_key_state - .load_if_needed(api_url, |this| &mut this.api_key_state, cx) + self.api_key_state.load_if_needed( + api_url, + |this| &mut this.api_key_state, + credentials_provider, + cx, + ) } } impl XAiLanguageModelProvider { - pub fn new(http_client: Arc, cx: &mut App) -> Self { + pub fn new( + http_client: Arc, + credentials_provider: Arc, + cx: &mut App, + ) -> Self { let state = cx.new(|cx| { cx.observe_global::(|this: &mut State, cx| { + let credentials_provider = this.credentials_provider.clone(); let api_url = Self::api_url(cx); - this.api_key_state - .handle_url_change(api_url, |this| &mut this.api_key_state, cx); + this.api_key_state.handle_url_change( + api_url, + |this| &mut this.api_key_state, + credentials_provider, + cx, + ); cx.notify(); }) .detach(); State { api_key_state: ApiKeyState::new(Self::api_url(cx), (*API_KEY_ENV_VAR).clone()), + credentials_provider, } }); diff --git a/crates/language_selector/src/active_buffer_language.rs b/crates/language_selector/src/active_buffer_language.rs index c75c3954cc6590c2e0cb4326c073ed004eaac280..1f280282af933094cf46cd9e7ab790efd07b8a12 100644 --- a/crates/language_selector/src/active_buffer_language.rs +++ b/crates/language_selector/src/active_buffer_language.rs @@ -29,7 +29,7 @@ impl ActiveBufferLanguage { self.active_language = Some(None); let editor = editor.read(cx); - if let Some((_, buffer, _)) = editor.active_excerpt(cx) + if let Some(buffer) = editor.active_buffer(cx) && let Some(language) = buffer.read(cx).language() { self.active_language = Some(Some(language.name())); diff --git a/crates/language_selector/src/language_selector.rs b/crates/language_selector/src/language_selector.rs index e5e6a2e264dbb923390e05b283fe341a3336af97..70a03514f45371d58d0a8ee0a14eb87565d3a514 100644 --- a/crates/language_selector/src/language_selector.rs +++ b/crates/language_selector/src/language_selector.rs @@ -51,11 +51,11 @@ impl LanguageSelector { cx: &mut Context, ) -> 
Option<()> { let registry = workspace.app_state().languages.clone(); - let (_, buffer, _) = workspace + let buffer = workspace .active_item(cx)? .act_as::(cx)? .read(cx) - .active_excerpt(cx)?; + .active_buffer(cx)?; let project = workspace.project().clone(); workspace.toggle_modal(window, cx, move |window, cx| { @@ -414,10 +414,10 @@ mod tests { ) -> Entity { let editor = open_new_buffer_editor(workspace, project, cx).await; // Ensure the buffer has no language after the editor is created - let (_, buffer, _) = editor.read_with(cx, |editor, cx| { + let buffer = editor.read_with(cx, |editor, cx| { editor - .active_excerpt(cx) - .expect("editor should have an active excerpt") + .active_buffer(cx) + .expect("editor should have an active buffer") }); buffer.update(cx, |buffer, cx| { buffer.set_language(None, cx); @@ -454,8 +454,8 @@ mod tests { .await .expect("language should exist in registry"); editor.update(cx, move |editor, cx| { - let (_, buffer, _) = editor - .active_excerpt(cx) + let buffer = editor + .active_buffer(cx) .expect("editor should have an active excerpt"); buffer.update(cx, |buffer, cx| { buffer.set_language(Some(language), cx); @@ -578,6 +578,15 @@ mod tests { assert_selected_language_for_editor(&workspace, &rust_editor, Some("Rust"), cx); assert_selected_language_for_editor(&workspace, &typescript_editor, Some("TypeScript"), cx); + // Ensure the empty editor's buffer has no language before asserting + let buffer = empty_editor.read_with(cx, |editor, cx| { + editor + .active_buffer(cx) + .expect("editor should have an active excerpt") + }); + buffer.update(cx, |buffer, cx| { + buffer.set_language(None, cx); + }); assert_selected_language_for_editor(&workspace, &empty_editor, None, cx); } diff --git a/crates/language_tools/src/highlights_tree_view.rs b/crates/language_tools/src/highlights_tree_view.rs index c2f684c11dc148c8f66b6cf20e0ca06e40905db7..763cdf76dab46a7fc1c233eda84cfb4ab50e6975 100644 --- 
a/crates/language_tools/src/highlights_tree_view.rs +++ b/crates/language_tools/src/highlights_tree_view.rs @@ -1,5 +1,5 @@ use editor::{ - Anchor, Editor, ExcerptId, HighlightKey, MultiBufferSnapshot, SelectionEffects, ToPoint, + Anchor, Editor, HighlightKey, MultiBufferSnapshot, SelectionEffects, ToPoint, scroll::Autoscroll, }; use gpui::{ @@ -8,8 +8,7 @@ use gpui::{ MouseDownEvent, MouseMoveEvent, ParentElement, Render, ScrollStrategy, SharedString, Styled, Task, UniformListScrollHandle, WeakEntity, Window, actions, div, rems, uniform_list, }; -use language::ToOffset; - +use language::{BufferId, Point, ToOffset}; use menu::{SelectNext, SelectPrevious}; use std::{mem, ops::Range}; use theme::ActiveTheme; @@ -114,12 +113,12 @@ impl HighlightCategory { #[derive(Debug, Clone)] struct HighlightEntry { - excerpt_id: ExcerptId, range: Range, + buffer_id: BufferId, + buffer_point_range: Range, range_display: SharedString, style: HighlightStyle, category: HighlightCategory, - sort_key: (ExcerptId, u32, u32, u32, u32), } /// An item in the display list: either a separator between excerpts or a highlight entry. 
@@ -319,20 +318,18 @@ impl HighlightsTreeView { display_map.update(cx, |display_map, cx| { for (key, text_highlights) in display_map.all_text_highlights() { for range in &text_highlights.1 { - let excerpt_id = range.start.excerpt_id; - let (range_display, sort_key) = format_anchor_range( - range, - excerpt_id, - &multi_buffer_snapshot, - is_singleton, - ); + let Some((range_display, buffer_id, buffer_point_range)) = + format_anchor_range(range, &multi_buffer_snapshot) + else { + continue; + }; entries.push(HighlightEntry { - excerpt_id, range: range.clone(), + buffer_id, range_display, style: text_highlights.0, category: HighlightCategory::Text(*key), - sort_key, + buffer_point_range, }); } } @@ -345,13 +342,11 @@ impl HighlightsTreeView { .and_then(|buf| buf.read(cx).language().map(|l| l.name())); for token in tokens.iter() { let range = token.range.start..token.range.end; - let excerpt_id = range.start.excerpt_id; - let (range_display, sort_key) = format_anchor_range( - &range, - excerpt_id, - &multi_buffer_snapshot, - is_singleton, - ); + let Some((range_display, entry_buffer_id, buffer_point_range)) = + format_anchor_range(&range, &multi_buffer_snapshot) + else { + continue; + }; let Some(stylizer) = lsp_store.get_or_create_token_stylizer( token.server_id, language_name.as_ref(), @@ -388,8 +383,8 @@ impl HighlightsTreeView { }); entries.push(HighlightEntry { - excerpt_id, range, + buffer_id: entry_buffer_id, range_display, style: interner[token.style], category: HighlightCategory::SemanticToken { @@ -399,7 +394,7 @@ impl HighlightsTreeView { .map(SharedString::from), theme_key, }, - sort_key, + buffer_point_range, }); } } @@ -407,7 +402,13 @@ impl HighlightsTreeView { }); let syntax_theme = cx.theme().syntax().clone(); - for (excerpt_id, buffer_snapshot, excerpt_range) in multi_buffer_snapshot.excerpts() { + for excerpt_range in multi_buffer_snapshot.excerpts() { + let Some(buffer_snapshot) = + 
multi_buffer_snapshot.buffer_for_id(excerpt_range.context.start.buffer_id) + else { + continue; + }; + let start_offset = excerpt_range.context.start.to_offset(buffer_snapshot); let end_offset = excerpt_range.context.end.to_offset(buffer_snapshot); let range = start_offset..end_offset; @@ -419,7 +420,10 @@ impl HighlightsTreeView { let highlight_maps: Vec<_> = grammars.iter().map(|g| g.highlight_map()).collect(); for capture in captures { - let highlight_id = highlight_maps[capture.grammar_index].get(capture.index); + let Some(highlight_id) = highlight_maps[capture.grammar_index].get(capture.index) + else { + continue; + }; let Some(style) = syntax_theme.get(highlight_id).cloned() else { continue; }; @@ -438,8 +442,8 @@ impl HighlightsTreeView { let start_anchor = buffer_snapshot.anchor_before(capture.node.start_byte()); let end_anchor = buffer_snapshot.anchor_after(capture.node.end_byte()); - let start = multi_buffer_snapshot.anchor_in_excerpt(excerpt_id, start_anchor); - let end = multi_buffer_snapshot.anchor_in_excerpt(excerpt_id, end_anchor); + let start = multi_buffer_snapshot.anchor_in_excerpt(start_anchor); + let end = multi_buffer_snapshot.anchor_in_excerpt(end_anchor); let (start, end) = match (start, end) { (Some(s), Some(e)) => (s, e), @@ -447,29 +451,38 @@ impl HighlightsTreeView { }; let range = start..end; - let (range_display, sort_key) = - format_anchor_range(&range, excerpt_id, &multi_buffer_snapshot, is_singleton); + let Some((range_display, buffer_id, buffer_point_range)) = + format_anchor_range(&range, &multi_buffer_snapshot) + else { + continue; + }; entries.push(HighlightEntry { - excerpt_id, range, + buffer_id, range_display, style, category: HighlightCategory::SyntaxToken { capture_name, theme_key, }, - sort_key, + buffer_point_range, }); } } entries.sort_by(|a, b| { - a.sort_key - .cmp(&b.sort_key) + a.buffer_id + .cmp(&b.buffer_id) + .then_with(|| a.buffer_point_range.start.cmp(&b.buffer_point_range.start)) + .then_with(|| 
a.buffer_point_range.end.cmp(&b.buffer_point_range.end)) .then_with(|| a.category.cmp(&b.category)) }); - entries.dedup_by(|a, b| a.sort_key == b.sort_key && a.category == b.category); + entries.dedup_by(|a, b| { + a.buffer_id == b.buffer_id + && a.buffer_point_range == b.buffer_point_range + && a.category == b.category + }); self.cached_entries = entries; self.rebuild_display_items(&multi_buffer_snapshot, cx); @@ -485,7 +498,7 @@ impl HighlightsTreeView { fn rebuild_display_items(&mut self, snapshot: &MultiBufferSnapshot, cx: &App) { self.display_items.clear(); - let mut last_excerpt_id: Option = None; + let mut last_range_end: Option = None; for (entry_ix, entry) in self.cached_entries.iter().enumerate() { if !self.should_show_entry(entry) { @@ -493,11 +506,14 @@ impl HighlightsTreeView { } if !self.is_singleton { - let excerpt_changed = - last_excerpt_id.is_none_or(|last_id| last_id != entry.excerpt_id); + let excerpt_changed = last_range_end.is_none_or(|anchor| { + snapshot + .excerpt_containing(anchor..entry.range.start) + .is_none() + }); if excerpt_changed { - last_excerpt_id = Some(entry.excerpt_id); - let label = excerpt_label_for(entry.excerpt_id, snapshot, cx); + last_range_end = Some(entry.range.end); + let label = excerpt_label_for(entry, snapshot, cx); self.display_items .push(DisplayItem::ExcerptSeparator { label }); } @@ -516,10 +532,6 @@ impl HighlightsTreeView { } fn scroll_to_cursor_position(&mut self, cursor: &Anchor, snapshot: &MultiBufferSnapshot) { - let cursor_point = cursor.to_point(snapshot); - let cursor_key = (cursor_point.row, cursor_point.column); - let cursor_excerpt = cursor.excerpt_id; - let best = self .display_items .iter() @@ -532,17 +544,18 @@ impl HighlightsTreeView { _ => None, }) .filter(|(_, _, entry)| { - let (excerpt_id, start_row, start_col, end_row, end_col) = entry.sort_key; - if !self.is_singleton && excerpt_id != cursor_excerpt { - return false; - } - let start = (start_row, start_col); - let end = (end_row, end_col); 
- cursor_key >= start && cursor_key <= end + entry.range.start.cmp(&cursor, snapshot).is_le() + && cursor.cmp(&entry.range.end, snapshot).is_lt() }) .min_by_key(|(_, _, entry)| { - let (_, start_row, start_col, end_row, end_col) = entry.sort_key; - (end_row - start_row, end_col.saturating_sub(start_col)) + ( + entry.buffer_point_range.end.row - entry.buffer_point_range.start.row, + entry + .buffer_point_range + .end + .column + .saturating_sub(entry.buffer_point_range.start.column), + ) }) .map(|(display_ix, entry_ix, _)| (display_ix, entry_ix)); @@ -1076,12 +1089,13 @@ impl ToolbarItemView for HighlightsTreeToolbarItemView { } fn excerpt_label_for( - excerpt_id: ExcerptId, + entry: &HighlightEntry, snapshot: &MultiBufferSnapshot, cx: &App, ) -> SharedString { - let buffer = snapshot.buffer_for_excerpt(excerpt_id); - let path_label = buffer + let path_label = snapshot + .anchor_to_buffer_anchor(entry.range.start) + .and_then(|(anchor, _)| snapshot.buffer_for_id(anchor.buffer_id)) .and_then(|buf| buf.file()) .map(|file| { let full_path = file.full_path(cx); @@ -1093,50 +1107,21 @@ fn excerpt_label_for( fn format_anchor_range( range: &Range, - excerpt_id: ExcerptId, snapshot: &MultiBufferSnapshot, - is_singleton: bool, -) -> (SharedString, (ExcerptId, u32, u32, u32, u32)) { - if is_singleton { - let start = range.start.to_point(snapshot); - let end = range.end.to_point(snapshot); - let display = SharedString::from(format!( - "[{}:{} - {}:{}]", - start.row + 1, - start.column + 1, - end.row + 1, - end.column + 1, - )); - let sort_key = (excerpt_id, start.row, start.column, end.row, end.column); - (display, sort_key) - } else { - let buffer = snapshot.buffer_for_excerpt(excerpt_id); - if let Some(buffer) = buffer { - let start = language::ToPoint::to_point(&range.start.text_anchor, buffer); - let end = language::ToPoint::to_point(&range.end.text_anchor, buffer); - let display = SharedString::from(format!( - "[{}:{} - {}:{}]", - start.row + 1, - start.column + 1, - 
end.row + 1, - end.column + 1, - )); - let sort_key = (excerpt_id, start.row, start.column, end.row, end.column); - (display, sort_key) - } else { - let start = range.start.to_point(snapshot); - let end = range.end.to_point(snapshot); - let display = SharedString::from(format!( - "[{}:{} - {}:{}]", - start.row + 1, - start.column + 1, - end.row + 1, - end.column + 1, - )); - let sort_key = (excerpt_id, start.row, start.column, end.row, end.column); - (display, sort_key) - } - } +) -> Option<(SharedString, BufferId, Range)> { + let start = range.start.to_point(snapshot); + let end = range.end.to_point(snapshot); + let ((start_buffer, start), (_, end)) = snapshot + .point_to_buffer_point(start) + .zip(snapshot.point_to_buffer_point(end))?; + let display = SharedString::from(format!( + "[{}:{} - {}:{}]", + start.row + 1, + start.column + 1, + end.row + 1, + end.column + 1, + )); + Some((display, start_buffer.remote_id(), start..end)) } fn render_style_preview(style: HighlightStyle, selected: bool, cx: &App) -> Div { diff --git a/crates/language_tools/src/lsp_button.rs b/crates/language_tools/src/lsp_button.rs index 59b14d470003f3a8a4f45b7b2b3e51505f562e56..43b1736223478fe29f45aac0a712fafad1d2dcbe 100644 --- a/crates/language_tools/src/lsp_button.rs +++ b/crates/language_tools/src/lsp_button.rs @@ -1179,13 +1179,20 @@ impl StatusItemView for LspButton { .and_then(|active_editor| active_editor.editor.upgrade()) .as_ref() { - let editor_buffers = - HashSet::from_iter(editor.read(cx).buffer().read(cx).excerpt_buffer_ids()); + let editor_buffers = HashSet::from_iter( + editor + .read(cx) + .buffer() + .read(cx) + .snapshot(cx) + .excerpts() + .map(|excerpt| excerpt.context.start.buffer_id), + ); let _editor_subscription = cx.subscribe_in( &editor, window, |lsp_button, _, e: &EditorEvent, window, cx| match e { - EditorEvent::ExcerptsAdded { buffer, .. } => { + EditorEvent::BufferRangesUpdated { buffer, .. 
} => { let updated = lsp_button.server_state.update(cx, |state, cx| { if let Some(active_editor) = state.active_editor.as_mut() { let buffer_id = buffer.read(cx).remote_id(); @@ -1198,9 +1205,7 @@ impl StatusItemView for LspButton { lsp_button.refresh_lsp_menu(false, window, cx); } } - EditorEvent::ExcerptsRemoved { - removed_buffer_ids, .. - } => { + EditorEvent::BuffersRemoved { removed_buffer_ids } => { let removed = lsp_button.server_state.update(cx, |state, _| { let mut removed = false; if let Some(active_editor) = state.active_editor.as_mut() { diff --git a/crates/language_tools/src/lsp_log_view.rs b/crates/language_tools/src/lsp_log_view.rs index ff1ec56b41ccf12ce6e497c21439aea5c97c3d39..97f0676d250cac2cee54b307e7c07d894d3d3128 100644 --- a/crates/language_tools/src/lsp_log_view.rs +++ b/crates/language_tools/src/lsp_log_view.rs @@ -880,6 +880,7 @@ impl SearchableItem for LspLogView { // LSP log is read-only. replacement: false, selection: false, + select_all: true, } } fn active_match_index( diff --git a/crates/language_tools/src/syntax_tree_view.rs b/crates/language_tools/src/syntax_tree_view.rs index b44d2e05d90733469a5385c2695b3fda3ff47c5e..9c751dd8eaf71272b649b037425caa4aa73b39cc 100644 --- a/crates/language_tools/src/syntax_tree_view.rs +++ b/crates/language_tools/src/syntax_tree_view.rs @@ -1,7 +1,6 @@ use command_palette_hooks::CommandPaletteFilter; use editor::{ - Anchor, Editor, ExcerptId, HighlightKey, MultiBufferOffset, SelectionEffects, - scroll::Autoscroll, + Anchor, Editor, HighlightKey, MultiBufferOffset, SelectionEffects, scroll::Autoscroll, }; use gpui::{ App, AppContext as _, Context, Div, Entity, EntityId, EventEmitter, FocusHandle, Focusable, @@ -125,7 +124,6 @@ impl EditorState { #[derive(Clone)] struct BufferState { buffer: Entity, - excerpt_id: ExcerptId, active_layer: Option, } @@ -253,18 +251,18 @@ impl SyntaxTreeView { let snapshot = editor_state .editor .update(cx, |editor, cx| editor.snapshot(window, cx)); - let (buffer, range, 
excerpt_id) = editor_state.editor.update(cx, |editor, cx| { + let (buffer, range) = editor_state.editor.update(cx, |editor, cx| { let selection_range = editor .selections .last::(&editor.display_snapshot(cx)) .range(); let multi_buffer = editor.buffer().read(cx); - let (buffer, range, excerpt_id) = snapshot + let (buffer, range, _) = snapshot .buffer_snapshot() - .range_to_buffer_ranges(selection_range.start..=selection_range.end) + .range_to_buffer_ranges(selection_range.start..selection_range.end) .pop()?; let buffer = multi_buffer.buffer(buffer.remote_id()).unwrap(); - Some((buffer, range, excerpt_id)) + Some((buffer, range)) })?; // If the cursor has moved into a different excerpt, retrieve a new syntax layer @@ -273,16 +271,14 @@ impl SyntaxTreeView { .active_buffer .get_or_insert_with(|| BufferState { buffer: buffer.clone(), - excerpt_id, active_layer: None, }); let mut prev_layer = None; if did_reparse { prev_layer = buffer_state.active_layer.take(); } - if buffer_state.buffer != buffer || buffer_state.excerpt_id != excerpt_id { + if buffer_state.buffer != buffer { buffer_state.buffer = buffer.clone(); - buffer_state.excerpt_id = excerpt_id; buffer_state.active_layer = None; } @@ -360,8 +356,7 @@ impl SyntaxTreeView { // Build a multibuffer anchor range. let multibuffer = editor_state.editor.read(cx).buffer(); let multibuffer = multibuffer.read(cx).snapshot(cx); - let excerpt_id = buffer_state.excerpt_id; - let range = multibuffer.anchor_range_in_excerpt(excerpt_id, range)?; + let range = multibuffer.buffer_anchor_range_to_anchor_range(range)?; let key = cx.entity_id().as_u64() as usize; // Update the editor with the anchor range. 
diff --git a/crates/languages/src/rust.rs b/crates/languages/src/rust.rs index 3bb8826d555308145847d47525cba9de84a6aa89..d92c1392c128ed72b6e2972bc54dcf7dfc152b1e 100644 --- a/crates/languages/src/rust.rs +++ b/crates/languages/src/rust.rs @@ -1542,10 +1542,10 @@ mod tests { "await.as_deref_mut(&mut self) -> IterMut<'_, T>".to_string(), 6..18, vec![ - (6..18, HighlightId(2)), - (20..23, HighlightId(1)), - (33..40, HighlightId(0)), - (45..46, HighlightId(0)) + (6..18, HighlightId::new(2)), + (20..23, HighlightId::new(1)), + (33..40, HighlightId::new(0)), + (45..46, HighlightId::new(0)) ], )) ); @@ -1572,12 +1572,12 @@ mod tests { "pub fn as_deref_mut(&mut self) -> IterMut<'_, T>".to_string(), 7..19, vec![ - (0..3, HighlightId(1)), - (4..6, HighlightId(1)), - (7..19, HighlightId(2)), - (21..24, HighlightId(1)), - (34..41, HighlightId(0)), - (46..47, HighlightId(0)) + (0..3, HighlightId::new(1)), + (4..6, HighlightId::new(1)), + (7..19, HighlightId::new(2)), + (21..24, HighlightId::new(1)), + (34..41, HighlightId::new(0)), + (46..47, HighlightId::new(0)) ], )) ); @@ -1598,7 +1598,7 @@ mod tests { Some(CodeLabel::new( "inner_value: String".to_string(), 6..11, - vec![(0..11, HighlightId(3)), (13..19, HighlightId(0))], + vec![(0..11, HighlightId::new(3)), (13..19, HighlightId::new(0))], )) ); @@ -1625,8 +1625,8 @@ mod tests { vec![ (10..13, HighlightId::TABSTOP_INSERT_ID), (16..19, HighlightId::TABSTOP_INSERT_ID), - (0..7, HighlightId(2)), - (7..8, HighlightId(2)), + (0..7, HighlightId::new(2)), + (7..8, HighlightId::new(2)), ], )) ); @@ -1653,8 +1653,8 @@ mod tests { 0..4, vec![ (5..9, HighlightId::TABSTOP_REPLACE_ID), - (0..3, HighlightId(2)), - (3..4, HighlightId(2)), + (0..3, HighlightId::new(2)), + (3..4, HighlightId::new(2)), ], )) ); @@ -1682,8 +1682,8 @@ mod tests { vec![ (7..10, HighlightId::TABSTOP_REPLACE_ID), (13..16, HighlightId::TABSTOP_INSERT_ID), - (0..2, HighlightId(1)), - (3..6, HighlightId(1)), + (0..2, HighlightId::new(1)), + (3..6, 
HighlightId::new(1)), ], )) ); @@ -1711,8 +1711,8 @@ mod tests { vec![ (4..8, HighlightId::TABSTOP_REPLACE_ID), (12..16, HighlightId::TABSTOP_REPLACE_ID), - (0..3, HighlightId(1)), - (9..11, HighlightId(1)), + (0..3, HighlightId::new(1)), + (9..11, HighlightId::new(1)), ], )) ); diff --git a/crates/line_ending_selector/src/line_ending_indicator.rs b/crates/line_ending_selector/src/line_ending_indicator.rs index ee858d706b3a8152c868a5bd629c112a4d1b225f..9c493344e757174035a30e42126389ced9ea1624 100644 --- a/crates/line_ending_selector/src/line_ending_indicator.rs +++ b/crates/line_ending_selector/src/line_ending_indicator.rs @@ -18,7 +18,7 @@ impl LineEndingIndicator { self.line_ending = None; self.active_editor = None; - if let Some((_, buffer, _)) = editor.read(cx).active_excerpt(cx) { + if let Some(buffer) = editor.read(cx).active_buffer(cx) { let line_ending = buffer.read(cx).line_ending(); self.line_ending = Some(line_ending); self.active_editor = Some(editor.downgrade()); diff --git a/crates/line_ending_selector/src/line_ending_selector.rs b/crates/line_ending_selector/src/line_ending_selector.rs index 504c327a349c97214e801f6bd375d61c7847f2be..455807565f8be52e574327f10d5881bb575c60f3 100644 --- a/crates/line_ending_selector/src/line_ending_selector.rs +++ b/crates/line_ending_selector/src/line_ending_selector.rs @@ -40,7 +40,7 @@ impl LineEndingSelector { fn toggle(editor: &WeakEntity, window: &mut Window, cx: &mut App) { let Some((workspace, buffer)) = editor .update(cx, |editor, cx| { - Some((editor.workspace()?, editor.active_excerpt(cx)?.1)) + Some((editor.workspace()?, editor.active_buffer(cx)?)) }) .ok() .flatten() diff --git a/crates/markdown/src/markdown.rs b/crates/markdown/src/markdown.rs index c31ca79e7581926e7696fa596aaccc9371512841..247c082d223005a7e0bd6d57696751ce76cc4d86 100644 --- a/crates/markdown/src/markdown.rs +++ b/crates/markdown/src/markdown.rs @@ -154,6 +154,8 @@ impl MarkdownStyle { base_text_style: text_style.clone(), syntax: 
cx.theme().syntax().clone(), selection_background_color: colors.element_selection_background, + rule_color: colors.border, + block_quote_border_color: colors.border, code_block_overflow_x_scroll: true, heading_level_styles: Some(HeadingLevelStyles { h1: Some(TextStyleRefinement { @@ -261,6 +263,8 @@ pub struct Markdown { copied_code_blocks: HashSet, code_block_scroll_handles: BTreeMap, context_menu_selected_text: Option, + search_highlights: Vec>, + active_search_highlight: Option, } #[derive(Clone, Copy, Default)] @@ -314,6 +318,78 @@ actions!( ] ); +enum EscapeAction { + PassThrough, + Nbsp(usize), + DoubleNewline, + PrefixBackslash, +} + +impl EscapeAction { + fn output_len(&self) -> usize { + match self { + Self::PassThrough => 1, + Self::Nbsp(count) => count * '\u{00A0}'.len_utf8(), + Self::DoubleNewline => 2, + Self::PrefixBackslash => 2, + } + } + + fn write_to(&self, c: char, output: &mut String) { + match self { + Self::PassThrough => output.push(c), + Self::Nbsp(count) => { + for _ in 0..*count { + output.push('\u{00A0}'); + } + } + Self::DoubleNewline => { + output.push('\n'); + output.push('\n'); + } + Self::PrefixBackslash => { + // '\\' is a single backslash in Rust, e.g. '|' -> '\|' + output.push('\\'); + output.push(c); + } + } + } +} + +// Valid to operate on raw bytes since multi-byte UTF-8 +// sequences never contain ASCII-range bytes. 
+struct MarkdownEscaper { + in_leading_whitespace: bool, +} + +impl MarkdownEscaper { + const TAB_SIZE: usize = 4; + + fn new() -> Self { + Self { + in_leading_whitespace: true, + } + } + + fn next(&mut self, byte: u8) -> EscapeAction { + let action = if self.in_leading_whitespace && byte == b'\t' { + EscapeAction::Nbsp(Self::TAB_SIZE) + } else if self.in_leading_whitespace && byte == b' ' { + EscapeAction::Nbsp(1) + } else if byte == b'\n' { + EscapeAction::DoubleNewline + } else if byte.is_ascii_punctuation() { + EscapeAction::PrefixBackslash + } else { + EscapeAction::PassThrough + }; + + self.in_leading_whitespace = + byte == b'\n' || (self.in_leading_whitespace && (byte == b' ' || byte == b'\t')); + action + } +} + impl Markdown { pub fn new( source: SharedString, @@ -356,6 +432,8 @@ impl Markdown { copied_code_blocks: HashSet::default(), code_block_scroll_handles: BTreeMap::default(), context_menu_selected_text: None, + search_highlights: Vec::new(), + active_search_highlight: None, }; this.parse(cx); this @@ -467,6 +545,8 @@ impl Markdown { self.autoscroll_request = None; self.pending_parse = None; self.should_reparse = false; + self.search_highlights.clear(); + self.active_search_highlight = None; // Don't clear parsed_markdown here - keep existing content visible until new parse completes self.parse(cx); } @@ -477,30 +557,21 @@ impl Markdown { } pub fn escape(s: &str) -> Cow<'_, str> { - // Valid to use bytes since multi-byte UTF-8 doesn't use ASCII chars. 
- let count = s - .bytes() - .filter(|c| *c == b'\n' || c.is_ascii_punctuation()) - .count(); - if count > 0 { - let mut output = String::with_capacity(s.len() + count); - let mut is_newline = false; - for c in s.chars() { - if is_newline && c == ' ' { - continue; - } - is_newline = c == '\n'; - if c == '\n' { - output.push('\n') - } else if c.is_ascii_punctuation() { - output.push('\\') - } - output.push(c) - } - output.into() - } else { - s.into() + let output_len: usize = { + let mut escaper = MarkdownEscaper::new(); + s.bytes().map(|byte| escaper.next(byte).output_len()).sum() + }; + + if output_len == s.len() { + return s.into(); } + + let mut escaper = MarkdownEscaper::new(); + let mut output = String::with_capacity(output_len); + for c in s.chars() { + escaper.next(c as u8).write_to(c, &mut output); + } + output.into() } pub fn selected_text(&self) -> Option { @@ -511,6 +582,40 @@ impl Markdown { } } + pub fn set_search_highlights( + &mut self, + highlights: Vec>, + active: Option, + cx: &mut Context, + ) { + self.search_highlights = highlights; + self.active_search_highlight = active; + cx.notify(); + } + + pub fn clear_search_highlights(&mut self, cx: &mut Context) { + if !self.search_highlights.is_empty() || self.active_search_highlight.is_some() { + self.search_highlights.clear(); + self.active_search_highlight = None; + cx.notify(); + } + } + + pub fn set_active_search_highlight(&mut self, active: Option, cx: &mut Context) { + if self.active_search_highlight != active { + self.active_search_highlight = active; + cx.notify(); + } + } + + pub fn search_highlights(&self) -> &[Range] { + &self.search_highlights + } + + pub fn active_search_highlight(&self) -> Option { + self.active_search_highlight + } + fn copy(&self, text: &RenderedText, _: &mut Window, cx: &mut Context) { if self.selection.end <= self.selection.start { return; @@ -1019,18 +1124,18 @@ impl MarkdownElement { builder.pop_div(); } - fn paint_selection( - &self, + fn paint_highlight_range( 
bounds: Bounds, + start: usize, + end: usize, + color: Hsla, rendered_text: &RenderedText, window: &mut Window, - cx: &mut App, ) { - let selection = self.markdown.read(cx).selection.clone(); - let selection_start = rendered_text.position_for_source_index(selection.start); - let selection_end = rendered_text.position_for_source_index(selection.end); + let start_pos = rendered_text.position_for_source_index(start); + let end_pos = rendered_text.position_for_source_index(end); if let Some(((start_position, start_line_height), (end_position, end_line_height))) = - selection_start.zip(selection_end) + start_pos.zip(end_pos) { if start_position.y == end_position.y { window.paint_quad(quad( @@ -1039,7 +1144,7 @@ impl MarkdownElement { point(end_position.x, end_position.y + end_line_height), ), Pixels::ZERO, - self.style.selection_background_color, + color, Edges::default(), Hsla::transparent_black(), BorderStyle::default(), @@ -1051,7 +1156,7 @@ impl MarkdownElement { point(bounds.right(), start_position.y + start_line_height), ), Pixels::ZERO, - self.style.selection_background_color, + color, Edges::default(), Hsla::transparent_black(), BorderStyle::default(), @@ -1064,7 +1169,7 @@ impl MarkdownElement { point(bounds.right(), end_position.y), ), Pixels::ZERO, - self.style.selection_background_color, + color, Edges::default(), Hsla::transparent_black(), BorderStyle::default(), @@ -1077,7 +1182,7 @@ impl MarkdownElement { point(end_position.x, end_position.y + end_line_height), ), Pixels::ZERO, - self.style.selection_background_color, + color, Edges::default(), Hsla::transparent_black(), BorderStyle::default(), @@ -1086,6 +1191,52 @@ impl MarkdownElement { } } + fn paint_selection( + &self, + bounds: Bounds, + rendered_text: &RenderedText, + window: &mut Window, + cx: &mut App, + ) { + let selection = self.markdown.read(cx).selection.clone(); + Self::paint_highlight_range( + bounds, + selection.start, + selection.end, + self.style.selection_background_color, + 
rendered_text, + window, + ); + } + + fn paint_search_highlights( + &self, + bounds: Bounds, + rendered_text: &RenderedText, + window: &mut Window, + cx: &mut App, + ) { + let markdown = self.markdown.read(cx); + let active_index = markdown.active_search_highlight; + let colors = cx.theme().colors(); + + for (i, highlight_range) in markdown.search_highlights.iter().enumerate() { + let color = if Some(i) == active_index { + colors.search_active_match_background + } else { + colors.search_match_background + }; + Self::paint_highlight_range( + bounds, + highlight_range.start, + highlight_range.end, + color, + rendered_text, + window, + ); + } + } + fn paint_mouse_listeners( &mut self, hitbox: &Hitbox, @@ -1890,6 +2041,7 @@ impl Element for MarkdownElement { self.paint_mouse_listeners(hitbox, &rendered_markdown.text, window, cx); rendered_markdown.element.paint(window, cx); + self.paint_search_highlights(bounds, &rendered_markdown.text, window, cx); self.paint_selection(bounds, &rendered_markdown.text, window, cx); } } @@ -3077,15 +3229,120 @@ mod tests { ); } + fn nbsp(n: usize) -> String { + "\u{00A0}".repeat(n) + } + + #[test] + fn test_escape_plain_text() { + assert_eq!(Markdown::escape("hello world"), "hello world"); + assert_eq!(Markdown::escape(""), ""); + assert_eq!(Markdown::escape("café ☕ naïve"), "café ☕ naïve"); + } + + #[test] + fn test_escape_punctuation() { + assert_eq!(Markdown::escape("hello `world`"), r"hello \`world\`"); + assert_eq!(Markdown::escape("a|b"), r"a\|b"); + } + + #[test] + fn test_escape_leading_spaces() { + assert_eq!(Markdown::escape(" hello"), [ (4), "hello"].concat()); + assert_eq!( + Markdown::escape(" | { a: string }"), + [ (4), r"\| \{ a\: string \}"].concat() + ); + assert_eq!( + Markdown::escape(" first\n second"), + [ (2), "first\n\n",  (2), "second"].concat() + ); + assert_eq!(Markdown::escape("hello world"), "hello world"); + } + + #[test] + fn test_escape_leading_tabs() { + assert_eq!(Markdown::escape("\thello"), [ (4), 
"hello"].concat()); + assert_eq!( + Markdown::escape("hello\n\t\tindented"), + ["hello\n\n",  (8), "indented"].concat() + ); + assert_eq!( + Markdown::escape(" \t hello"), + [ (1 + 4 + 1), "hello"].concat() + ); + assert_eq!(Markdown::escape("hello\tworld"), "hello\tworld"); + } + #[test] - fn test_escape() { - assert_eq!(Markdown::escape("hello `world`"), "hello \\`world\\`"); + fn test_escape_newlines() { + assert_eq!(Markdown::escape("a\nb"), "a\n\nb"); + assert_eq!(Markdown::escape("a\n\nb"), "a\n\n\n\nb"); + assert_eq!(Markdown::escape("\nhello"), "\n\nhello"); + } + + #[test] + fn test_escape_multiline_diagnostic() { assert_eq!( - Markdown::escape("hello\n cool world"), - "hello\n\ncool world" + Markdown::escape(" | { a: string }\n | { b: number }"), + [ +  (4), + r"\| \{ a\: string \}", + "\n\n", +  (4), + r"\| \{ b\: number \}", + ] + .concat() ); } + fn has_code_block(markdown: &str) -> bool { + let parsed_data = parse_markdown_with_options(markdown, false); + parsed_data + .events + .iter() + .any(|(_, event)| matches!(event, MarkdownEvent::Start(MarkdownTag::CodeBlock { .. 
}))) + } + + #[test] + fn test_escape_output_len_matches_precomputed() { + let cases = [ + "", + "hello world", + "hello `world`", + " hello", + " | { a: string }", + "\thello", + "hello\n\t\tindented", + " \t hello", + "hello\tworld", + "a\nb", + "a\n\nb", + "\nhello", + " | { a: string }\n | { b: number }", + "café ☕ naïve", + ]; + for input in cases { + let mut escaper = MarkdownEscaper::new(); + let precomputed: usize = input.bytes().map(|b| escaper.next(b).output_len()).sum(); + + let mut escaper = MarkdownEscaper::new(); + let mut output = String::new(); + for c in input.chars() { + escaper.next(c as u8).write_to(c, &mut output); + } + + assert_eq!(precomputed, output.len(), "length mismatch for {:?}", input); + } + } + + #[test] + fn test_escape_prevents_code_block() { + let diagnostic = " | { a: string }"; + assert!(has_code_block(diagnostic)); + assert!(!has_code_block(&Markdown::escape(diagnostic))); + } + #[track_caller] fn assert_mappings(rendered: &RenderedText, expected: Vec>) { assert_eq!(rendered.lines.len(), expected.len(), "line count mismatch"); diff --git a/crates/markdown_preview/Cargo.toml b/crates/markdown_preview/Cargo.toml index 19f1270bb91e8a7e9e660a62d8191a9d12b66641..3a07b258c5bd17ef2da02820ef2e724f7389ce13 100644 --- a/crates/markdown_preview/Cargo.toml +++ b/crates/markdown_preview/Cargo.toml @@ -21,6 +21,7 @@ gpui.workspace = true language.workspace = true log.workspace = true markdown.workspace = true +project.workspace = true settings.workspace = true theme_settings.workspace = true ui.workspace = true diff --git a/crates/markdown_preview/src/markdown_preview_view.rs b/crates/markdown_preview/src/markdown_preview_view.rs index 6dbf44c20f3ce453a7ef711e1854b806cf29737a..3e6423b36603e247ba5da2a2166a8357701fa5cd 100644 --- a/crates/markdown_preview/src/markdown_preview_view.rs +++ b/crates/markdown_preview/src/markdown_preview_view.rs @@ -1,4 +1,5 @@ use std::cmp::min; +use std::ops::Range; use std::path::{Path, PathBuf}; use 
std::sync::Arc; use std::time::Duration; @@ -16,11 +17,15 @@ use markdown::{ CodeBlockRenderer, CopyButtonVisibility, Markdown, MarkdownElement, MarkdownFont, MarkdownOptions, MarkdownStyle, }; +use project::search::SearchQuery; use settings::Settings; use theme_settings::ThemeSettings; use ui::{WithScrollbar, prelude::*}; use util::normalize_path; -use workspace::item::{Item, ItemHandle}; +use workspace::item::{Item, ItemBufferKind, ItemHandle}; +use workspace::searchable::{ + Direction, SearchEvent, SearchOptions, SearchToken, SearchableItem, SearchableItemHandle, +}; use workspace::{OpenOptions, OpenVisible, Pane, Workspace}; use crate::{ @@ -295,7 +300,7 @@ impl MarkdownPreviewView { EditorEvent::Edited { .. } | EditorEvent::BufferEdited { .. } | EditorEvent::DirtyChanged - | EditorEvent::ExcerptsEdited { .. } => { + | EditorEvent::BuffersEdited { .. } => { this.update_markdown_from_active_editor(true, false, window, cx); } EditorEvent::SelectionsChanged { .. } => { @@ -382,6 +387,7 @@ impl MarkdownPreviewView { markdown.reset(contents, cx); }); view.sync_preview_to_source_index(selection_start, should_reveal_selection, cx); + cx.emit(SearchEvent::MatchesInvalidated); } view.pending_update_task = None; cx.notify(); @@ -751,6 +757,7 @@ impl Focusable for MarkdownPreviewView { } impl EventEmitter<()> for MarkdownPreviewView {} +impl EventEmitter for MarkdownPreviewView {} impl Item for MarkdownPreviewView { type Event = (); @@ -775,6 +782,18 @@ impl Item for MarkdownPreviewView { } fn to_item_events(_event: &Self::Event, _f: &mut dyn FnMut(workspace::item::ItemEvent)) {} + + fn buffer_kind(&self, _cx: &App) -> ItemBufferKind { + ItemBufferKind::Singleton + } + + fn as_searchable( + &self, + handle: &Entity, + _: &App, + ) -> Option> { + Some(Box::new(handle.clone())) + } } impl Render for MarkdownPreviewView { @@ -807,6 +826,140 @@ impl Render for MarkdownPreviewView { } } +impl SearchableItem for MarkdownPreviewView { + type Match = Range; + + fn 
supported_options(&self) -> SearchOptions { + SearchOptions { + case: true, + word: true, + regex: true, + replacement: false, + selection: false, + select_all: false, + find_in_results: false, + } + } + + fn get_matches(&self, _window: &mut Window, cx: &mut App) -> (Vec, SearchToken) { + ( + self.markdown.read(cx).search_highlights().to_vec(), + SearchToken::default(), + ) + } + + fn clear_matches(&mut self, _window: &mut Window, cx: &mut Context) { + let had_highlights = !self.markdown.read(cx).search_highlights().is_empty(); + self.markdown.update(cx, |markdown, cx| { + markdown.clear_search_highlights(cx); + }); + if had_highlights { + cx.emit(SearchEvent::MatchesInvalidated); + } + } + + fn update_matches( + &mut self, + matches: &[Self::Match], + active_match_index: Option, + _token: SearchToken, + _window: &mut Window, + cx: &mut Context, + ) { + let old_highlights = self.markdown.read(cx).search_highlights(); + let changed = old_highlights != matches; + self.markdown.update(cx, |markdown, cx| { + markdown.set_search_highlights(matches.to_vec(), active_match_index, cx); + }); + if changed { + cx.emit(SearchEvent::MatchesInvalidated); + } + } + + fn query_suggestion(&mut self, _window: &mut Window, cx: &mut Context) -> String { + self.markdown.read(cx).selected_text().unwrap_or_default() + } + + fn activate_match( + &mut self, + index: usize, + matches: &[Self::Match], + _token: SearchToken, + _window: &mut Window, + cx: &mut Context, + ) { + if let Some(match_range) = matches.get(index) { + let start = match_range.start; + self.markdown.update(cx, |markdown, cx| { + markdown.set_active_search_highlight(Some(index), cx); + markdown.request_autoscroll_to_source_index(start, cx); + }); + cx.emit(SearchEvent::ActiveMatchChanged); + } + } + + fn select_matches( + &mut self, + _matches: &[Self::Match], + _token: SearchToken, + _window: &mut Window, + _cx: &mut Context, + ) { + } + + fn replace( + &mut self, + _: &Self::Match, + _: &SearchQuery, + _token: 
SearchToken, + _window: &mut Window, + _: &mut Context, + ) { + } + + fn find_matches( + &mut self, + query: Arc, + _window: &mut Window, + cx: &mut Context, + ) -> Task> { + let source = self.markdown.read(cx).source().to_string(); + cx.background_spawn(async move { query.search_str(&source) }) + } + + fn active_match_index( + &mut self, + direction: Direction, + matches: &[Self::Match], + _token: SearchToken, + _window: &mut Window, + cx: &mut Context, + ) -> Option { + if matches.is_empty() { + return None; + } + + let markdown = self.markdown.read(cx); + let current_source_index = markdown + .active_search_highlight() + .and_then(|i| markdown.search_highlights().get(i)) + .map(|m| m.start) + .or(self.active_source_index) + .unwrap_or(0); + + match direction { + Direction::Next => matches + .iter() + .position(|m| m.start >= current_source_index) + .or(Some(0)), + Direction::Prev => matches + .iter() + .rposition(|m| m.start <= current_source_index) + .or(Some(matches.len().saturating_sub(1))), + } + } +} + #[cfg(test)] mod tests { use crate::markdown_preview_view::ImageSource; diff --git a/crates/migrator/src/migrations.rs b/crates/migrator/src/migrations.rs index d554ee1dd887d6048f55a584ed2534db944b3c08..625bd27e91e117662f9a47edaaac2ddaa7d2ba1c 100644 --- a/crates/migrator/src/migrations.rs +++ b/crates/migrator/src/migrations.rs @@ -316,3 +316,15 @@ pub(crate) mod m_2026_03_23 { pub(crate) use keymap::KEYMAP_PATTERNS; } + +pub(crate) mod m_2026_03_30 { + mod settings; + + pub(crate) use settings::make_play_sound_when_agent_done_an_enum; +} + +pub(crate) mod m_2026_04_01 { + mod settings; + + pub(crate) use settings::restructure_profiles_with_settings_key; +} diff --git a/crates/migrator/src/migrations/m_2026_03_30/settings.rs b/crates/migrator/src/migrations/m_2026_03_30/settings.rs new file mode 100644 index 0000000000000000000000000000000000000000..598941a6212442a4562814d43df6184e4eb76640 --- /dev/null +++ 
b/crates/migrator/src/migrations/m_2026_03_30/settings.rs @@ -0,0 +1,29 @@ +use anyhow::Result; +use serde_json::Value; + +use crate::migrations::migrate_settings; + +pub fn make_play_sound_when_agent_done_an_enum(value: &mut Value) -> Result<()> { + migrate_settings(value, &mut migrate_one) +} + +fn migrate_one(obj: &mut serde_json::Map) -> Result<()> { + let Some(play_sound) = obj + .get_mut("agent") + .and_then(|agent| agent.as_object_mut()) + .and_then(|agent| agent.get_mut("play_sound_when_agent_done")) + else { + return Ok(()); + }; + + *play_sound = match play_sound { + Value::Bool(true) => Value::String("always".to_string()), + Value::Bool(false) => Value::String("never".to_string()), + Value::String(s) if s == "never" || s == "when_hidden" || s == "always" => return Ok(()), + _ => { + anyhow::bail!("Expected play_sound_when_agent_done to be a boolean or valid enum value") + } + }; + + Ok(()) +} diff --git a/crates/migrator/src/migrations/m_2026_04_01/settings.rs b/crates/migrator/src/migrations/m_2026_04_01/settings.rs new file mode 100644 index 0000000000000000000000000000000000000000..240572fa7754e29d43b23f178115878a99760729 --- /dev/null +++ b/crates/migrator/src/migrations/m_2026_04_01/settings.rs @@ -0,0 +1,29 @@ +use anyhow::Result; +use serde_json::Value; + +pub fn restructure_profiles_with_settings_key(value: &mut Value) -> Result<()> { + let Some(root_object) = value.as_object_mut() else { + return Ok(()); + }; + + let Some(profiles) = root_object.get_mut("profiles") else { + return Ok(()); + }; + + let Some(profiles_map) = profiles.as_object_mut() else { + return Ok(()); + }; + + for profile_value in profiles_map.values_mut() { + if profile_value + .as_object() + .is_some_and(|m| m.contains_key("settings") || m.contains_key("base")) + { + continue; + } + + *profile_value = serde_json::json!({ "settings": profile_value }); + } + + Ok(()) +} diff --git a/crates/migrator/src/migrator.rs b/crates/migrator/src/migrator.rs index 
ceb6ec2e0e35f0dd3bbd23174637bba00baab6b3..f49d102213c446be17c7d240d272cf4b516d912c 100644 --- a/crates/migrator/src/migrator.rs +++ b/crates/migrator/src/migrator.rs @@ -247,6 +247,8 @@ pub fn migrate_settings(text: &str) -> Result> { migrations::m_2026_03_16::SETTINGS_PATTERNS, &SETTINGS_QUERY_2026_03_16, ), + MigrationType::Json(migrations::m_2026_03_30::make_play_sound_when_agent_done_an_enum), + MigrationType::Json(migrations::m_2026_04_01::restructure_profiles_with_settings_key), ]; run_migrations(text, migrations) } @@ -2400,6 +2402,132 @@ mod tests { ); } + #[test] + fn test_make_play_sound_when_agent_done_an_enum() { + assert_migrate_with_migrations( + &[MigrationType::Json( + migrations::m_2026_03_30::make_play_sound_when_agent_done_an_enum, + )], + &r#"{ }"#.unindent(), + None, + ); + + assert_migrate_with_migrations( + &[MigrationType::Json( + migrations::m_2026_03_30::make_play_sound_when_agent_done_an_enum, + )], + &r#"{ + "agent": { + "play_sound_when_agent_done": true + } + }"# + .unindent(), + Some( + &r#"{ + "agent": { + "play_sound_when_agent_done": "always" + } + }"# + .unindent(), + ), + ); + + assert_migrate_with_migrations( + &[MigrationType::Json( + migrations::m_2026_03_30::make_play_sound_when_agent_done_an_enum, + )], + &r#"{ + "agent": { + "play_sound_when_agent_done": false + } + }"# + .unindent(), + Some( + &r#"{ + "agent": { + "play_sound_when_agent_done": "never" + } + }"# + .unindent(), + ), + ); + + assert_migrate_with_migrations( + &[MigrationType::Json( + migrations::m_2026_03_30::make_play_sound_when_agent_done_an_enum, + )], + &r#"{ + "agent": { + "play_sound_when_agent_done": "when_hidden" + } + }"# + .unindent(), + None, + ); + + // Platform key: settings nested inside "macos" should be migrated + assert_migrate_with_migrations( + &[MigrationType::Json( + migrations::m_2026_03_30::make_play_sound_when_agent_done_an_enum, + )], + &r#" + { + "macos": { + "agent": { + "play_sound_when_agent_done": true + } + } + } + "# + 
.unindent(), + Some( + &r#" + { + "macos": { + "agent": { + "play_sound_when_agent_done": "always" + } + } + } + "# + .unindent(), + ), + ); + + // Profile: settings nested inside profiles should be migrated + assert_migrate_with_migrations( + &[MigrationType::Json( + migrations::m_2026_03_30::make_play_sound_when_agent_done_an_enum, + )], + &r#" + { + "profiles": { + "work": { + "agent": { + "play_sound_when_agent_done": false + } + } + } + } + "# + .unindent(), + Some( + &r#" + { + "profiles": { + "work": { + "agent": { + "play_sound_when_agent_done": "never" + } + } + } + } + "# + .unindent(), + ), + ); + } + #[test] fn test_remove_context_server_source() { assert_migrate_settings( @@ -4480,4 +4608,78 @@ mod tests { ), ); } + + #[test] + fn test_restructure_profiles_with_settings_key() { + assert_migrate_settings( + &r#" + { + "buffer_font_size": 14, + "profiles": { + "Presenting": { + "buffer_font_size": 20, + "theme": "One Light" + }, + "Minimal": { + "vim_mode": true + } + } + } + "# + .unindent(), + Some( + &r#" + { + "buffer_font_size": 14, + "profiles": { + "Presenting": { + "settings": { + "buffer_font_size": 20, + "theme": "One Light" + } + }, + "Minimal": { + "settings": { + "vim_mode": true + } + } + } + } + "# + .unindent(), + ), + ); + } + + #[test] + fn test_restructure_profiles_with_settings_key_already_migrated() { + assert_migrate_settings( + &r#" + { + "profiles": { + "Presenting": { + "settings": { + "buffer_font_size": 20 + } + } + } + } + "# + .unindent(), + None, + ); + } + + #[test] + fn test_restructure_profiles_with_settings_key_no_profiles() { + assert_migrate_settings( + &r#" + { + "buffer_font_size": 14 + } + "# + .unindent(), + None, + ); + } } diff --git a/crates/multi_buffer/src/anchor.rs b/crates/multi_buffer/src/anchor.rs index cf4df9f53ccd2ca86fc6c064d51b7557404dd251..08b159effafa2f34dbf1b10768bf356aaf74ae31 100644 --- a/crates/multi_buffer/src/anchor.rs +++ b/crates/multi_buffer/src/anchor.rs @@ -1,192 +1,331 @@ -use 
crate::{MultiBufferDimension, MultiBufferOffset, MultiBufferOffsetUtf16}; +use crate::{ + ExcerptSummary, MultiBufferDimension, MultiBufferOffset, MultiBufferOffsetUtf16, PathKey, + PathKeyIndex, find_diff_state, +}; -use super::{ExcerptId, MultiBufferSnapshot, ToOffset, ToPoint}; -use language::Point; +use super::{MultiBufferSnapshot, ToOffset, ToPoint}; +use language::{BufferSnapshot, Point}; use std::{ cmp::Ordering, ops::{Add, AddAssign, Range, Sub}, }; use sum_tree::Bias; +use text::BufferId; + +/// A multibuffer anchor derived from an anchor into a specific excerpted buffer. +#[derive(Clone, Copy, Debug, Eq, PartialEq, Hash)] +pub struct ExcerptAnchor { + pub(crate) text_anchor: text::Anchor, + pub(crate) path: PathKeyIndex, + pub(crate) diff_base_anchor: Option, +} /// A stable reference to a position within a [`MultiBuffer`](super::MultiBuffer). /// /// Unlike simple offsets, anchors remain valid as the text is edited, automatically /// adjusting to reflect insertions and deletions around them. #[derive(Clone, Copy, Eq, PartialEq, Hash)] -pub struct Anchor { - /// Identifies which excerpt within the multi-buffer this anchor belongs to. - /// A multi-buffer can contain multiple excerpts from different buffers. - pub excerpt_id: ExcerptId, - /// The position within the excerpt's underlying buffer. This is a stable - /// reference that remains valid as the buffer text is edited. - pub text_anchor: text::Anchor, - /// When present, indicates this anchor points into deleted text within an - /// expanded diff hunk. The anchor references a position in the diff base - /// (original) text rather than the current buffer text. This is used when - /// displaying inline diffs where deleted lines are shown. - pub diff_base_anchor: Option, +pub enum Anchor { + /// An anchor that always resolves to the start of the multibuffer. + Min, + /// An anchor that's attached to a specific excerpted buffer. 
+ Excerpt(ExcerptAnchor), + /// An anchor that always resolves to the end of the multibuffer. + Max, } -impl std::fmt::Debug for Anchor { +pub(crate) enum AnchorSeekTarget { + Excerpt { + path_key: PathKey, + anchor: ExcerptAnchor, + // None when the buffer no longer exists in the multibuffer + snapshot: Option, + }, + Empty, +} + +impl std::fmt::Debug for AnchorSeekTarget { fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { - if self.is_min() { - return write!(f, "Anchor::min({:?})", self.text_anchor.buffer_id); + match self { + Self::Excerpt { + path_key, + anchor, + snapshot: _, + } => f + .debug_struct("Excerpt") + .field("path_key", path_key) + .field("anchor", anchor) + .finish(), + Self::Empty => write!(f, "Empty"), } - if self.is_max() { - return write!(f, "Anchor::max({:?})", self.text_anchor.buffer_id); + } +} + +impl std::fmt::Debug for Anchor { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + match self { + Anchor::Min => write!(f, "Anchor::Min"), + Anchor::Max => write!(f, "Anchor::Max"), + Anchor::Excerpt(excerpt_anchor) => write!(f, "{excerpt_anchor:?}"), } + } +} - f.debug_struct("Anchor") - .field("excerpt_id", &self.excerpt_id) - .field("text_anchor", &self.text_anchor) - .field("diff_base_anchor", &self.diff_base_anchor) - .finish() +impl From for Anchor { + fn from(anchor: ExcerptAnchor) -> Self { + Anchor::Excerpt(anchor) } } -impl Anchor { - pub fn with_diff_base_anchor(self, diff_base_anchor: text::Anchor) -> Self { - Self { - diff_base_anchor: Some(diff_base_anchor), - ..self +impl ExcerptAnchor { + pub(crate) fn buffer_id(&self) -> BufferId { + self.text_anchor.buffer_id + } + + pub(crate) fn text_anchor(&self) -> text::Anchor { + self.text_anchor + } + + pub(crate) fn with_diff_base_anchor(mut self, diff_base_anchor: text::Anchor) -> Self { + self.diff_base_anchor = Some(diff_base_anchor); + self + } + + pub(crate) fn cmp(&self, other: &Self, snapshot: &MultiBufferSnapshot) -> Ordering { + let 
Some(self_path_key) = snapshot.path_keys_by_index.get(&self.path) else { + panic!("anchor's path was never added to multibuffer") + }; + let Some(other_path_key) = snapshot.path_keys_by_index.get(&other.path) else { + panic!("anchor's path was never added to multibuffer") + }; + + if self_path_key.cmp(other_path_key) != Ordering::Equal { + return self_path_key.cmp(other_path_key); + } + + // in the case that you removed the buffer containing self, + // and added the buffer containing other with the same path key + // (ordering is arbitrary but consistent) + if self.text_anchor.buffer_id != other.text_anchor.buffer_id { + return self.text_anchor.buffer_id.cmp(&other.text_anchor.buffer_id); + } + + let Some(buffer) = snapshot.buffer_for_path(&self_path_key) else { + return Ordering::Equal; + }; + // Comparing two anchors into buffer A that formerly existed at path P, + // when path P has since been reused for a different buffer B + if buffer.remote_id() != self.text_anchor.buffer_id { + return Ordering::Equal; + }; + assert_eq!(self.text_anchor.buffer_id, buffer.remote_id()); + let text_cmp = self.text_anchor().cmp(&other.text_anchor(), buffer); + if text_cmp != Ordering::Equal { + return text_cmp; + } + + if (self.diff_base_anchor.is_some() || other.diff_base_anchor.is_some()) + && let Some(base_text) = find_diff_state(&snapshot.diffs, self.text_anchor.buffer_id) + .map(|diff| diff.base_text()) + { + let self_anchor = self.diff_base_anchor.filter(|a| a.is_valid(base_text)); + let other_anchor = other.diff_base_anchor.filter(|a| a.is_valid(base_text)); + return match (self_anchor, other_anchor) { + (Some(a), Some(b)) => a.cmp(&b, base_text), + (Some(_), None) => match other.text_anchor().bias { + Bias::Left => Ordering::Greater, + Bias::Right => Ordering::Less, + }, + (None, Some(_)) => match self.text_anchor().bias { + Bias::Left => Ordering::Less, + Bias::Right => Ordering::Greater, + }, + (None, None) => Ordering::Equal, + }; } + + Ordering::Equal } - pub fn 
in_buffer(excerpt_id: ExcerptId, text_anchor: text::Anchor) -> Self { - Self { - excerpt_id, - text_anchor, - diff_base_anchor: None, + fn bias_left(&self, snapshot: &MultiBufferSnapshot) -> Self { + if self.text_anchor.bias == Bias::Left { + return *self; + } + let Some(buffer) = snapshot.buffer_for_id(self.text_anchor.buffer_id) else { + return *self; + }; + let text_anchor = self.text_anchor().bias_left(&buffer); + let ret = Self::in_buffer(self.path, text_anchor); + if let Some(diff_base_anchor) = self.diff_base_anchor { + if let Some(diff) = find_diff_state(&snapshot.diffs, self.text_anchor.buffer_id) + && diff_base_anchor.is_valid(&diff.base_text()) + { + ret.with_diff_base_anchor(diff_base_anchor.bias_left(diff.base_text())) + } else { + ret.with_diff_base_anchor(diff_base_anchor) + } + } else { + ret } } - pub fn range_in_buffer(excerpt_id: ExcerptId, range: Range) -> Range { - Self::in_buffer(excerpt_id, range.start)..Self::in_buffer(excerpt_id, range.end) + fn bias_right(&self, snapshot: &MultiBufferSnapshot) -> Self { + if self.text_anchor.bias == Bias::Right { + return *self; + } + let Some(buffer) = snapshot.buffer_for_id(self.text_anchor.buffer_id) else { + return *self; + }; + let text_anchor = self.text_anchor().bias_right(&buffer); + let ret = Self::in_buffer(self.path, text_anchor); + if let Some(diff_base_anchor) = self.diff_base_anchor { + if let Some(diff) = find_diff_state(&snapshot.diffs, self.text_anchor.buffer_id) + && diff_base_anchor.is_valid(&diff.base_text()) + { + ret.with_diff_base_anchor(diff_base_anchor.bias_right(diff.base_text())) + } else { + ret.with_diff_base_anchor(diff_base_anchor) + } + } else { + ret + } } - pub fn min() -> Self { - Self { - excerpt_id: ExcerptId::min(), - text_anchor: text::Anchor::MIN, + #[track_caller] + pub(crate) fn in_buffer(path: PathKeyIndex, text_anchor: text::Anchor) -> Self { + ExcerptAnchor { + path, diff_base_anchor: None, + text_anchor, } } - pub fn max() -> Self { - Self { - excerpt_id: 
ExcerptId::max(), - text_anchor: text::Anchor::MAX, - diff_base_anchor: None, + fn is_valid(&self, snapshot: &MultiBufferSnapshot) -> bool { + let Some(target) = self.try_seek_target(snapshot) else { + return false; + }; + let Some(buffer_snapshot) = snapshot.buffer_for_id(self.buffer_id()) else { + return false; + }; + // Early check to avoid invalid comparisons when seeking + if !buffer_snapshot.can_resolve(&self.text_anchor) { + return false; } + let mut cursor = snapshot.excerpts.cursor::(()); + cursor.seek(&target, Bias::Left); + let Some(excerpt) = cursor.item() else { + return false; + }; + let is_valid = self.text_anchor == excerpt.range.context.start + || self.text_anchor == excerpt.range.context.end + || self.text_anchor.is_valid(&buffer_snapshot); + is_valid + && excerpt + .range + .context + .start + .cmp(&self.text_anchor(), buffer_snapshot) + .is_le() + && excerpt + .range + .context + .end + .cmp(&self.text_anchor(), buffer_snapshot) + .is_ge() + } + + pub(crate) fn seek_target(&self, snapshot: &MultiBufferSnapshot) -> AnchorSeekTarget { + self.try_seek_target(snapshot) + .expect("anchor is from different multi-buffer") + } + + pub(crate) fn try_seek_target( + &self, + snapshot: &MultiBufferSnapshot, + ) -> Option { + let path_key = snapshot.try_path_for_anchor(*self)?; + let buffer = snapshot.buffer_for_path(&path_key).cloned(); + Some(AnchorSeekTarget::Excerpt { + path_key, + anchor: *self, + snapshot: buffer, + }) + } +} + +impl ToOffset for ExcerptAnchor { + fn to_offset(&self, snapshot: &MultiBufferSnapshot) -> MultiBufferOffset { + Anchor::from(*self).to_offset(snapshot) + } + + fn to_offset_utf16(&self, snapshot: &MultiBufferSnapshot) -> MultiBufferOffsetUtf16 { + Anchor::from(*self).to_offset_utf16(snapshot) + } +} + +impl ToPoint for ExcerptAnchor { + fn to_point(&self, snapshot: &MultiBufferSnapshot) -> Point { + Anchor::from(*self).to_point(snapshot) } + fn to_point_utf16(&self, snapshot: &MultiBufferSnapshot) -> rope::PointUtf16 { + 
Anchor::from(*self).to_point_utf16(snapshot) + } +} + +impl Anchor { pub fn is_min(&self) -> bool { - self.excerpt_id == ExcerptId::min() - && self.text_anchor.is_min() - && self.diff_base_anchor.is_none() + matches!(self, Self::Min) } pub fn is_max(&self) -> bool { - self.excerpt_id == ExcerptId::max() - && self.text_anchor.is_max() - && self.diff_base_anchor.is_none() + matches!(self, Self::Max) } - pub fn cmp(&self, other: &Anchor, snapshot: &MultiBufferSnapshot) -> Ordering { - if self == other { - return Ordering::Equal; - } + pub(crate) fn in_buffer(path: PathKeyIndex, text_anchor: text::Anchor) -> Self { + Self::Excerpt(ExcerptAnchor::in_buffer(path, text_anchor)) + } - let self_excerpt_id = snapshot.latest_excerpt_id(self.excerpt_id); - let other_excerpt_id = snapshot.latest_excerpt_id(other.excerpt_id); + pub(crate) fn range_in_buffer(path: PathKeyIndex, range: Range) -> Range { + Self::in_buffer(path, range.start)..Self::in_buffer(path, range.end) + } - let excerpt_id_cmp = self_excerpt_id.cmp(&other_excerpt_id, snapshot); - if excerpt_id_cmp.is_ne() { - return excerpt_id_cmp; - } - if self_excerpt_id == ExcerptId::max() - && self.text_anchor.is_max() - && self.text_anchor.is_max() - && self.diff_base_anchor.is_none() - && other.diff_base_anchor.is_none() - { - return Ordering::Equal; - } - if let Some(excerpt) = snapshot.excerpt(self_excerpt_id) { - let text_cmp = self.text_anchor.cmp(&other.text_anchor, &excerpt.buffer); - if text_cmp.is_ne() { - return text_cmp; - } - if (self.diff_base_anchor.is_some() || other.diff_base_anchor.is_some()) - && let Some(base_text) = snapshot - .diff_state(excerpt.buffer_id) - .map(|diff| diff.base_text()) - { - let self_anchor = self.diff_base_anchor.filter(|a| a.is_valid(base_text)); - let other_anchor = other.diff_base_anchor.filter(|a| a.is_valid(base_text)); - return match (self_anchor, other_anchor) { - (Some(a), Some(b)) => a.cmp(&b, base_text), - (Some(_), None) => match other.text_anchor.bias { - Bias::Left => 
Ordering::Greater, - Bias::Right => Ordering::Less, - }, - (None, Some(_)) => match self.text_anchor.bias { - Bias::Left => Ordering::Less, - Bias::Right => Ordering::Greater, - }, - (None, None) => Ordering::Equal, - }; + pub fn cmp(&self, other: &Anchor, snapshot: &MultiBufferSnapshot) -> Ordering { + match (self, other) { + (Anchor::Min, Anchor::Min) => return Ordering::Equal, + (Anchor::Max, Anchor::Max) => return Ordering::Equal, + (Anchor::Min, _) => return Ordering::Less, + (Anchor::Max, _) => return Ordering::Greater, + (_, Anchor::Max) => return Ordering::Less, + (_, Anchor::Min) => return Ordering::Greater, + (Anchor::Excerpt(self_excerpt_anchor), Anchor::Excerpt(other_excerpt_anchor)) => { + self_excerpt_anchor.cmp(other_excerpt_anchor, snapshot) } } - Ordering::Equal } pub fn bias(&self) -> Bias { - self.text_anchor.bias + match self { + Anchor::Min => Bias::Left, + Anchor::Max => Bias::Right, + Anchor::Excerpt(anchor) => anchor.text_anchor.bias, + } } pub fn bias_left(&self, snapshot: &MultiBufferSnapshot) -> Anchor { - if self.text_anchor.bias != Bias::Left - && let Some(excerpt) = snapshot.excerpt(self.excerpt_id) - { - return Self { - excerpt_id: excerpt.id, - text_anchor: self.text_anchor.bias_left(&excerpt.buffer), - diff_base_anchor: self.diff_base_anchor.map(|a| { - if let Some(base_text) = snapshot - .diff_state(excerpt.buffer_id) - .map(|diff| diff.base_text()) - && a.is_valid(&base_text) - { - return a.bias_left(base_text); - } - a - }), - }; + match self { + Anchor::Min => *self, + Anchor::Max => snapshot.anchor_before(snapshot.max_point()), + Anchor::Excerpt(anchor) => Anchor::Excerpt(anchor.bias_left(snapshot)), } - *self } pub fn bias_right(&self, snapshot: &MultiBufferSnapshot) -> Anchor { - if self.text_anchor.bias != Bias::Right - && let Some(excerpt) = snapshot.excerpt(self.excerpt_id) - { - return Self { - excerpt_id: excerpt.id, - text_anchor: self.text_anchor.bias_right(&excerpt.buffer), - diff_base_anchor: 
self.diff_base_anchor.map(|a| { - if let Some(base_text) = snapshot - .diff_state(excerpt.buffer_id) - .map(|diff| diff.base_text()) - && a.is_valid(&base_text) - { - return a.bias_right(base_text); - } - a - }), - }; + match self { + Anchor::Max => *self, + Anchor::Min => snapshot.anchor_after(Point::zero()), + Anchor::Excerpt(anchor) => Anchor::Excerpt(anchor.bias_right(snapshot)), } - *self } pub fn summary(&self, snapshot: &MultiBufferSnapshot) -> D @@ -203,16 +342,111 @@ impl Anchor { } pub fn is_valid(&self, snapshot: &MultiBufferSnapshot) -> bool { - if self.is_min() || self.is_max() { - true - } else if let Some(excerpt) = snapshot.excerpt(self.excerpt_id) { - (self.text_anchor == excerpt.range.context.start - || self.text_anchor == excerpt.range.context.end - || self.text_anchor.is_valid(&excerpt.buffer)) - && excerpt.contains(self) - } else { - false + match self { + Anchor::Min | Anchor::Max => true, + Anchor::Excerpt(excerpt_anchor) => excerpt_anchor.is_valid(snapshot), + } + } + + fn to_excerpt_anchor(&self, snapshot: &MultiBufferSnapshot) -> Option { + match self { + Anchor::Min => { + let excerpt = snapshot.excerpts.first()?; + + Some(ExcerptAnchor { + text_anchor: excerpt.range.context.start, + path: excerpt.path_key_index, + diff_base_anchor: None, + }) + } + Anchor::Excerpt(excerpt_anchor) => Some(*excerpt_anchor), + Anchor::Max => { + let excerpt = snapshot.excerpts.last()?; + + Some(ExcerptAnchor { + text_anchor: excerpt.range.context.end, + path: excerpt.path_key_index, + diff_base_anchor: None, + }) + } + } + } + + pub(crate) fn seek_target(&self, snapshot: &MultiBufferSnapshot) -> AnchorSeekTarget { + let Some(excerpt_anchor) = self.to_excerpt_anchor(snapshot) else { + return AnchorSeekTarget::Empty; + }; + + excerpt_anchor.seek_target(snapshot) + } + + pub(crate) fn excerpt_anchor(&self) -> Option { + match self { + Anchor::Min | Anchor::Max => None, + Anchor::Excerpt(excerpt_anchor) => Some(*excerpt_anchor), + } + } + + pub(crate) fn 
text_anchor(&self) -> Option { + match self { + Anchor::Min | Anchor::Max => None, + Anchor::Excerpt(excerpt_anchor) => Some(excerpt_anchor.text_anchor()), + } + } + + pub fn opaque_id(&self) -> Option<[u8; 20]> { + self.text_anchor().map(|a| a.opaque_id()) + } + + /// Note: anchor_to_buffer_anchor is probably what you want + pub fn raw_text_anchor(&self) -> Option { + match self { + Anchor::Min | Anchor::Max => None, + Anchor::Excerpt(excerpt_anchor) => Some(excerpt_anchor.text_anchor), + } + } + + pub(crate) fn try_seek_target( + &self, + snapshot: &MultiBufferSnapshot, + ) -> Option { + let Some(excerpt_anchor) = self.to_excerpt_anchor(snapshot) else { + return Some(AnchorSeekTarget::Empty); + }; + excerpt_anchor.try_seek_target(snapshot) + } + + /// Returns the text anchor for this anchor. + /// Panics if the anchor is from a different buffer. + pub fn text_anchor_in(&self, buffer: &BufferSnapshot) -> text::Anchor { + match self { + Anchor::Min => text::Anchor::min_for_buffer(buffer.remote_id()), + Anchor::Excerpt(excerpt_anchor) => { + let text_anchor = excerpt_anchor.text_anchor; + assert_eq!(text_anchor.buffer_id, buffer.remote_id()); + text_anchor + } + Anchor::Max => text::Anchor::max_for_buffer(buffer.remote_id()), + } + } + + pub fn diff_base_anchor(&self) -> Option { + self.excerpt_anchor()?.diff_base_anchor + } + + #[cfg(any(test, feature = "test-support"))] + pub fn expect_text_anchor(&self) -> text::Anchor { + self.excerpt_anchor().unwrap().text_anchor + } + + pub fn with_diff_base_anchor(mut self, diff_base_anchor: text::Anchor) -> Self { + match &mut self { + Anchor::Min | Anchor::Max => {} + Anchor::Excerpt(excerpt_anchor) => { + excerpt_anchor.diff_base_anchor = Some(diff_base_anchor); + } } + self } } diff --git a/crates/multi_buffer/src/multi_buffer.rs b/crates/multi_buffer/src/multi_buffer.rs index 21b4d0e1a6c84189a9926d2d181f097c2bdf4ea7..a54ff64af028f44adced1758933f794e9a002c5a 100644 --- a/crates/multi_buffer/src/multi_buffer.rs +++ 
b/crates/multi_buffer/src/multi_buffer.rs @@ -8,6 +8,7 @@ use self::transaction::History; pub use anchor::{Anchor, AnchorRangeExt}; +use anchor::{AnchorSeekTarget, ExcerptAnchor}; use anyhow::{Result, anyhow}; use buffer_diff::{ BufferDiff, BufferDiffEvent, BufferDiffSnapshot, DiffChanged, DiffHunkSecondaryStatus, @@ -15,14 +16,14 @@ use buffer_diff::{ }; use clock::ReplicaId; use collections::{BTreeMap, Bound, HashMap, HashSet}; -use gpui::{App, Context, Entity, EntityId, EventEmitter}; +use gpui::{App, Context, Entity, EventEmitter}; use itertools::Itertools; use language::{ - AutoindentMode, BracketMatch, Buffer, BufferChunks, BufferRow, BufferSnapshot, Capability, - CharClassifier, CharKind, CharScopeContext, Chunk, CursorShape, DiagnosticEntryRef, File, - IndentGuideSettings, IndentSize, Language, LanguageScope, OffsetRangeExt, OffsetUtf16, Outline, - OutlineItem, Point, PointUtf16, Selection, TextDimension, TextObject, ToOffset as _, - ToPoint as _, TransactionId, TreeSitterOptions, Unclipped, + AutoindentMode, Buffer, BufferChunks, BufferRow, BufferSnapshot, Capability, CharClassifier, + CharKind, CharScopeContext, Chunk, CursorShape, DiagnosticEntryRef, File, IndentGuideSettings, + IndentSize, Language, LanguageScope, OffsetRangeExt, OffsetUtf16, Outline, OutlineItem, Point, + PointUtf16, Selection, TextDimension, TextObject, ToOffset as _, ToPoint as _, TransactionId, + TreeSitterOptions, Unclipped, language_settings::{AllLanguageSettings, LanguageSettings}, }; @@ -37,7 +38,8 @@ use std::{ any::type_name, borrow::Cow, cell::{Cell, OnceCell, Ref, RefCell}, - cmp, fmt, + cmp::{self, Ordering}, + fmt, future::Future, io, iter::{self, FromIterator}, @@ -51,15 +53,13 @@ use std::{ use sum_tree::{Bias, Cursor, Dimension, Dimensions, SumTree, TreeMap}; use text::{ BufferId, Edit, LineIndent, TextSummary, - locator::Locator, subscription::{Subscription, Topic}, }; use theme::SyntaxTheme; use unicode_segmentation::UnicodeSegmentation; -use util::post_inc; use 
ztracing::instrument; -pub use self::path_key::{PathExcerptInsertResult, PathKey}; +pub use self::path_key::PathKey; pub static EXCERPT_CONTEXT_LINES: OnceLock u32> = OnceLock::new(); @@ -67,9 +67,6 @@ pub fn excerpt_context_lines(cx: &App) -> u32 { EXCERPT_CONTEXT_LINES.get().map(|f| f(cx)).unwrap_or(2) } -#[derive(Debug, Default, Clone, Copy, Hash, PartialEq, Eq, PartialOrd, Ord)] -pub struct ExcerptId(u32); - /// One or more [`Buffers`](Buffer) being edited in a single view. /// /// See @@ -79,10 +76,6 @@ pub struct MultiBuffer { snapshot: RefCell, /// Contains the state of the buffers being edited buffers: BTreeMap, - /// Mapping from path keys to their excerpts. - excerpts_by_path: BTreeMap>, - /// Mapping from excerpt IDs to their path key. - paths_by_excerpt: HashMap, /// Mapping from buffer IDs to their diff states diffs: HashMap, subscriptions: Topic, @@ -98,24 +91,20 @@ pub struct MultiBuffer { buffer_changed_since_sync: Rc>, } +#[derive(Clone, Copy, Debug, PartialEq, Eq, PartialOrd, Ord, Hash)] +struct PathKeyIndex(u64); + #[derive(Clone, Debug, PartialEq, Eq)] pub enum Event { - ExcerptsAdded { + BufferRangesUpdated { buffer: Entity, - predecessor: ExcerptId, - excerpts: Vec<(ExcerptId, ExcerptRange)>, + path_key: PathKey, + ranges: Vec>, }, - ExcerptsRemoved { - ids: Vec, - /// Contains only buffer IDs for which all excerpts have been removed. - /// Buffers that still have remaining excerpts are never included. + BuffersRemoved { removed_buffer_ids: Vec, }, - ExcerptsExpanded { - ids: Vec, - }, - ExcerptsEdited { - excerpt_ids: Vec, + BuffersEdited { buffer_ids: Vec, }, DiffHunksToggled, @@ -145,14 +134,14 @@ pub struct MultiBufferDiffHunk { pub buffer_id: BufferId, /// The range of the underlying buffer that this hunk corresponds to. pub buffer_range: Range, - /// The excerpt that contains the diff hunk. - pub excerpt_id: ExcerptId, /// The range within the buffer's diff base that this hunk corresponds to. 
pub diff_base_byte_range: Range, /// The status of this hunk (added/modified/deleted and secondary status). pub status: DiffHunkStatus, /// The word diffs for this hunk. pub word_diffs: Vec>, + pub excerpt_range: ExcerptRange, + pub multi_buffer_range: Range, } impl MultiBufferDiffHunk { @@ -165,17 +154,12 @@ impl MultiBufferDiffHunk { && self.buffer_range.start.is_min() && self.buffer_range.end.is_max() } - - pub fn multi_buffer_range(&self) -> Range { - let start = Anchor::in_buffer(self.excerpt_id, self.buffer_range.start); - let end = Anchor::in_buffer(self.excerpt_id, self.buffer_range.end); - start..end - } } pub type MultiBufferPoint = Point; +/// ExcerptOffset is offset into the non-deleted text of the multibuffer type ExcerptOffset = ExcerptDimension; -type ExcerptPoint = ExcerptDimension; +/// ExcerptOffset is based on the non-deleted text of the multibuffer #[derive(Copy, Clone, Debug, Default, Eq, Ord, PartialOrd, PartialEq, Hash, serde::Deserialize)] #[serde(transparent)] @@ -518,10 +502,6 @@ pub trait ToPoint: 'static + fmt::Debug { struct BufferState { buffer: Entity, - last_version: RefCell, - last_non_text_state_update_count: Cell, - // Note, any changes to this field value require updating snapshot.buffer_locators as well - excerpts: Vec, _subscriptions: [gpui::Subscription; 2], } @@ -694,15 +674,31 @@ impl DiffState { } } +#[derive(Clone)] +struct BufferStateSnapshot { + path_key: PathKey, + path_key_index: PathKeyIndex, + buffer_snapshot: BufferSnapshot, +} + +impl fmt::Debug for BufferStateSnapshot { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + f.debug_struct("BufferStateSnapshot") + .field("path_key", &self.path_key) + .field("buffer_id", &self.buffer_snapshot.remote_id()) + .finish() + } +} + /// The contents of a [`MultiBuffer`] at a single point in time. 
#[derive(Clone, Default)] pub struct MultiBufferSnapshot { excerpts: SumTree, - buffer_locators: TreeMap>, + buffers: TreeMap, + path_keys_by_index: TreeMap, + indices_by_path_key: TreeMap, diffs: SumTree, diff_transforms: SumTree, - excerpt_ids: SumTree, - replaced_excerpts: Arc>, non_text_state_update_count: usize, edit_count: usize, is_dirty: bool, @@ -717,24 +713,12 @@ pub struct MultiBufferSnapshot { show_headers: bool, } -// follower: None -// - BufferContent(Some) -// - BufferContent(None) -// - DeletedHunk -// -// follower: Some -// - BufferContent(Some) -// - BufferContent(None) - #[derive(Debug, Clone)] enum DiffTransform { - // RealText BufferContent { summary: MBTextSummary, - // modified_hunk_info inserted_hunk_info: Option, }, - // ExpandedHunkText DeletedHunk { summary: TextSummary, buffer_id: BufferId, @@ -746,52 +730,71 @@ enum DiffTransform { #[derive(Clone, Copy, Debug)] struct DiffTransformHunkInfo { - excerpt_id: ExcerptId, + buffer_id: BufferId, hunk_start_anchor: text::Anchor, hunk_secondary_status: DiffHunkSecondaryStatus, is_logically_deleted: bool, + excerpt_end: ExcerptAnchor, } impl Eq for DiffTransformHunkInfo {} impl PartialEq for DiffTransformHunkInfo { fn eq(&self, other: &DiffTransformHunkInfo) -> bool { - self.excerpt_id == other.excerpt_id && self.hunk_start_anchor == other.hunk_start_anchor + self.buffer_id == other.buffer_id && self.hunk_start_anchor == other.hunk_start_anchor } } impl std::hash::Hash for DiffTransformHunkInfo { fn hash(&self, state: &mut H) { - self.excerpt_id.hash(state); + self.buffer_id.hash(state); self.hunk_start_anchor.hash(state); } } #[derive(Clone)] -pub struct ExcerptInfo { - pub id: ExcerptId, - pub buffer: Arc, - pub buffer_id: BufferId, +pub struct ExcerptBoundaryInfo { + pub start_anchor: Anchor, pub range: ExcerptRange, pub end_row: MultiBufferRow, } -impl std::fmt::Debug for ExcerptInfo { +impl ExcerptBoundaryInfo { + pub fn start_text_anchor(&self) -> text::Anchor { + self.range.context.start + 
} + pub fn buffer_id(&self) -> BufferId { + self.start_text_anchor().buffer_id + } + pub fn buffer<'a>(&self, snapshot: &'a MultiBufferSnapshot) -> &'a BufferSnapshot { + snapshot + .buffer_for_id(self.buffer_id()) + .expect("buffer snapshot not found for excerpt boundary") + } +} + +impl std::fmt::Debug for ExcerptBoundaryInfo { fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { f.debug_struct(type_name::()) - .field("id", &self.id) - .field("buffer_id", &self.buffer_id) - .field("path", &self.buffer.file().map(|f| f.path())) + .field("buffer_id", &self.buffer_id()) .field("range", &self.range) .finish() } } +impl PartialEq for ExcerptBoundaryInfo { + fn eq(&self, other: &Self) -> bool { + self.start_anchor == other.start_anchor && self.range == other.range + } +} + +impl Eq for ExcerptBoundaryInfo {} + /// A boundary between `Excerpt`s in a [`MultiBuffer`] #[derive(Debug)] pub struct ExcerptBoundary { - pub prev: Option, - pub next: ExcerptInfo, + pub prev: Option, + pub next: ExcerptBoundaryInfo, /// The row in the `MultiBuffer` where the boundary is located pub row: MultiBufferRow, } @@ -800,7 +803,7 @@ impl ExcerptBoundary { pub fn starts_new_buffer(&self) -> bool { match (self.prev.as_ref(), &self.next) { (None, _) => true, - (Some(prev), next) => prev.buffer_id != next.buffer_id, + (Some(prev), next) => prev.buffer_id() != next.buffer_id(), } } } @@ -808,7 +811,7 @@ impl ExcerptBoundary { #[derive(Copy, Clone, Debug, PartialEq, Eq)] pub struct ExpandInfo { pub direction: ExpandExcerptDirection, - pub excerpt_id: ExcerptId, + pub start_anchor: Anchor, } #[derive(Copy, Clone, Debug, Default, PartialEq, Eq)] @@ -822,45 +825,20 @@ pub struct RowInfo { } /// A slice into a [`Buffer`] that is being edited in a [`MultiBuffer`]. 
-#[derive(Clone)] -struct Excerpt { - /// The unique identifier for this excerpt - id: ExcerptId, +#[derive(Clone, Debug)] +pub(crate) struct Excerpt { /// The location of the excerpt in the [`MultiBuffer`] - locator: Locator, - /// The buffer being excerpted - buffer_id: BufferId, - /// A snapshot of the buffer being excerpted - buffer: Arc, + pub(crate) path_key: PathKey, + pub(crate) path_key_index: PathKeyIndex, + pub(crate) buffer_id: BufferId, /// The range of the buffer to be shown in the excerpt - range: ExcerptRange, + pub(crate) range: ExcerptRange, + /// The last row in the excerpted slice of the buffer - max_buffer_row: BufferRow, + pub(crate) max_buffer_row: BufferRow, /// A summary of the text in the excerpt - text_summary: TextSummary, - has_trailing_newline: bool, -} - -/// A public view into an `Excerpt` in a [`MultiBuffer`]. -/// -/// Contains methods for getting the [`Buffer`] of the excerpt, -/// as well as mapping offsets to/from buffer and multibuffer coordinates. -#[derive(Clone)] -pub struct MultiBufferExcerpt<'a> { - excerpt: &'a Excerpt, - diff_transforms: - sum_tree::Cursor<'a, 'static, DiffTransform, DiffTransforms>, - /// The offset in the multibuffer considering diff transforms. - offset: MultiBufferOffset, - /// The offset in the multibuffer without diff transforms. - excerpt_offset: ExcerptOffset, - buffer_offset: BufferOffset, -} - -#[derive(Clone, Debug)] -struct ExcerptIdMapping { - id: ExcerptId, - locator: Locator, + pub(crate) text_summary: TextSummary, + pub(crate) has_trailing_newline: bool, } /// A range of text from a single [`Buffer`], to be shown as an `Excerpt`. 
@@ -883,16 +861,37 @@ impl ExcerptRange { } } -#[derive(Clone, Debug, Default)] +impl ExcerptRange { + pub fn contains(&self, t: &text::Anchor, snapshot: &BufferSnapshot) -> bool { + self.context.start.cmp(t, snapshot).is_le() && self.context.end.cmp(t, snapshot).is_ge() + } +} + +#[derive(Clone, Debug)] pub struct ExcerptSummary { - excerpt_id: ExcerptId, - /// The location of the last [`Excerpt`] being summarized - excerpt_locator: Locator, + path_key: PathKey, + max_anchor: Option, widest_line_number: u32, text: MBTextSummary, count: usize, } +impl ExcerptSummary { + pub fn min() -> Self { + ExcerptSummary { + path_key: PathKey::min(), + max_anchor: None, + widest_line_number: 0, + text: MBTextSummary::default(), + count: 0, + } + } + + fn len(&self) -> ExcerptOffset { + ExcerptDimension(self.text.len) + } +} + #[derive(Debug, Clone)] pub struct DiffTransformSummary { input: MBTextSummary, @@ -1068,13 +1067,13 @@ pub struct MultiBufferChunks<'a> { excerpts: Cursor<'a, 'static, Excerpt, ExcerptOffset>, diff_transforms: Cursor<'a, 'static, DiffTransform, Dimensions>, - diffs: &'a SumTree, diff_base_chunks: Option<(BufferId, BufferChunks<'a>)>, buffer_chunk: Option>, range: Range, excerpt_offset_range: Range, excerpt_chunks: Option>, language_aware: bool, + snapshot: &'a MultiBufferSnapshot, } pub struct ReversedMultiBufferChunks<'a> { @@ -1128,8 +1127,8 @@ impl<'a, MBD: MultiBufferDimension> Dimension<'a, DiffTransformSummary> for Diff struct MultiBufferCursor<'a, MBD, BD> { excerpts: Cursor<'a, 'static, Excerpt, ExcerptDimension>, diff_transforms: Cursor<'a, 'static, DiffTransform, DiffTransforms>, - diffs: &'a SumTree, cached_region: OnceCell>>, + snapshot: &'a MultiBufferSnapshot, } #[derive(Clone)] @@ -1144,8 +1143,8 @@ struct MultiBufferRegion<'a, MBD, BD> { } struct ExcerptChunks<'a> { - excerpt_id: ExcerptId, content_chunks: BufferChunks<'a>, + end: ExcerptAnchor, has_footer: bool, } @@ -1155,7 +1154,6 @@ struct BufferEdit { new_text: Arc, is_insertion: 
bool, original_indent_column: Option, - excerpt_id: ExcerptId, } #[derive(Clone, Copy, Debug, PartialEq)] @@ -1258,8 +1256,6 @@ impl MultiBuffer { singleton: false, capability, title: None, - excerpts_by_path: Default::default(), - paths_by_excerpt: Default::default(), buffer_changed_since_sync: Default::default(), history: History::default(), } @@ -1276,11 +1272,6 @@ impl MultiBuffer { *buffer_id, BufferState { buffer: buffer_state.buffer.clone(), - last_version: buffer_state.last_version.clone(), - last_non_text_state_update_count: buffer_state - .last_non_text_state_update_count - .clone(), - excerpts: buffer_state.excerpts.clone(), _subscriptions: [ new_cx.observe(&buffer_state.buffer, |_, _, cx| cx.notify()), new_cx.subscribe(&buffer_state.buffer, Self::on_buffer_event), @@ -1295,8 +1286,6 @@ impl MultiBuffer { Self { snapshot: RefCell::new(self.snapshot.borrow().clone()), buffers, - excerpts_by_path: Default::default(), - paths_by_excerpt: Default::default(), diffs: diff_bases, subscriptions: Default::default(), singleton: self.singleton, @@ -1451,7 +1440,7 @@ impl MultiBuffer { _ => Default::default(), }; - let (buffer_edits, edited_excerpt_ids) = MultiBuffer::convert_edits_to_buffer_edits( + let buffer_edits = MultiBuffer::convert_edits_to_buffer_edits( edits, this.snapshot.get_mut(), &original_indent_columns, @@ -1472,14 +1461,12 @@ impl MultiBuffer { mut new_text, mut is_insertion, original_indent_column, - excerpt_id, }) = edits.next() { while let Some(BufferEdit { range: next_range, is_insertion: next_is_insertion, new_text: next_new_text, - excerpt_id: next_excerpt_id, .. 
}) = edits.peek() { @@ -1492,9 +1479,7 @@ impl MultiBuffer { if should_coalesce { range.end = cmp::max(next_range.end, range.end); is_insertion |= *next_is_insertion; - if excerpt_id == *next_excerpt_id { - new_text = format!("{new_text}{next_new_text}").into(); - } + new_text = format!("{new_text}{next_new_text}").into(); edits.next(); } else { break; @@ -1542,10 +1527,7 @@ impl MultiBuffer { }) } - cx.emit(Event::ExcerptsEdited { - excerpt_ids: edited_excerpt_ids, - buffer_ids, - }); + cx.emit(Event::BuffersEdited { buffer_ids }); } } @@ -1553,9 +1535,8 @@ impl MultiBuffer { edits: Vec<(Range, Arc)>, snapshot: &MultiBufferSnapshot, original_indent_columns: &[Option], - ) -> (HashMap>, Vec) { + ) -> HashMap> { let mut buffer_edits: HashMap> = Default::default(); - let mut edited_excerpt_ids = Vec::new(); let mut cursor = snapshot.cursor::(); for (ix, (range, new_text)) in edits.into_iter().enumerate() { let original_indent_column = original_indent_columns.get(ix).copied().flatten(); @@ -1600,11 +1581,10 @@ impl MultiBuffer { let buffer_end = (end_region.buffer_range.start + end_overshoot).min(end_region.buffer_range.end); - if start_region.excerpt.id == end_region.excerpt.id { + if start_region.excerpt == end_region.excerpt { if start_region.buffer.capability == Capability::ReadWrite && start_region.is_main_buffer { - edited_excerpt_ids.push(start_region.excerpt.id); buffer_edits .entry(start_region.buffer.remote_id()) .or_default() @@ -1613,7 +1593,6 @@ impl MultiBuffer { new_text, is_insertion: true, original_indent_column, - excerpt_id: start_region.excerpt.id, }); } } else { @@ -1622,7 +1601,6 @@ impl MultiBuffer { if start_region.buffer.capability == Capability::ReadWrite && start_region.is_main_buffer { - edited_excerpt_ids.push(start_region.excerpt.id); buffer_edits .entry(start_region.buffer.remote_id()) .or_default() @@ -1631,14 +1609,11 @@ impl MultiBuffer { new_text: new_text.clone(), is_insertion: true, original_indent_column, - excerpt_id: 
start_region.excerpt.id, }); } - let excerpt_id = end_region.excerpt.id; if end_region.buffer.capability == Capability::ReadWrite && end_region.is_main_buffer { - edited_excerpt_ids.push(excerpt_id); buffer_edits .entry(end_region.buffer.remote_id()) .or_default() @@ -1647,18 +1622,17 @@ impl MultiBuffer { new_text: new_text.clone(), is_insertion: false, original_indent_column, - excerpt_id, }); } + let end_region_excerpt = end_region.excerpt.clone(); cursor.seek(&range.start); cursor.next_excerpt(); while let Some(region) = cursor.region() { - if region.excerpt.id == excerpt_id { + if region.excerpt == &end_region_excerpt { break; } if region.buffer.capability == Capability::ReadWrite && region.is_main_buffer { - edited_excerpt_ids.push(region.excerpt.id); buffer_edits .entry(region.buffer.remote_id()) .or_default() @@ -1667,14 +1641,13 @@ impl MultiBuffer { new_text: new_text.clone(), is_insertion: false, original_indent_column, - excerpt_id: region.excerpt.id, }); } cursor.next_excerpt(); } } } - (buffer_edits, edited_excerpt_ids) + buffer_edits } pub fn autoindent_ranges(&mut self, ranges: I, cx: &mut Context) @@ -1706,7 +1679,7 @@ impl MultiBuffer { edits: Vec<(Range, Arc)>, cx: &mut Context, ) { - let (buffer_edits, edited_excerpt_ids) = + let buffer_edits = MultiBuffer::convert_edits_to_buffer_edits(edits, this.snapshot.get_mut(), &[]); let mut buffer_ids = Vec::new(); @@ -1730,10 +1703,7 @@ impl MultiBuffer { }) } - cx.emit(Event::ExcerptsEdited { - excerpt_ids: edited_excerpt_ids, - buffer_ids, - }); + cx.emit(Event::BuffersEdited { buffer_ids }); } } @@ -1744,38 +1714,25 @@ impl MultiBuffer { cursor_shape: CursorShape, cx: &mut Context, ) { + let snapshot = self.snapshot(cx); let mut selections_by_buffer: HashMap>> = Default::default(); - let snapshot = self.read(cx); - let mut cursor = snapshot.excerpts.cursor::>(()); - for selection in selections { - let start_locator = snapshot.excerpt_locator_for_id(selection.start.excerpt_id); - let end_locator = 
snapshot.excerpt_locator_for_id(selection.end.excerpt_id); - cursor.seek(&Some(start_locator), Bias::Left); - while let Some(excerpt) = cursor.item() - && excerpt.locator <= *end_locator + for selection in selections { + for (buffer_snapshot, buffer_range, _) in + snapshot.range_to_buffer_ranges(selection.start..selection.end) { - let mut start = excerpt.range.context.start; - let mut end = excerpt.range.context.end; - if excerpt.id == selection.start.excerpt_id { - start = selection.start.text_anchor; - } - if excerpt.id == selection.end.excerpt_id { - end = selection.end.text_anchor; - } selections_by_buffer - .entry(excerpt.buffer_id) + .entry(buffer_snapshot.remote_id()) .or_default() .push(Selection { id: selection.id, - start, - end, + start: buffer_snapshot + .anchor_at(buffer_range.start, selection.start.bias()), + end: buffer_snapshot.anchor_at(buffer_range.end, selection.end.bias()), reversed: selection.reversed, goal: selection.goal, }); - - cursor.next(); } } @@ -1787,25 +1744,9 @@ impl MultiBuffer { } } - for (buffer_id, mut selections) in selections_by_buffer { + for (buffer_id, selections) in selections_by_buffer { self.buffers[&buffer_id].buffer.update(cx, |buffer, cx| { - selections.sort_unstable_by(|a, b| a.start.cmp(&b.start, buffer)); - let mut selections = selections.into_iter().peekable(); - let merged_selections = Arc::from_iter(iter::from_fn(|| { - let mut selection = selections.next()?; - while let Some(next_selection) = selections.peek() { - if selection.end.cmp(&next_selection.start, buffer).is_ge() { - let next_selection = selections.next().unwrap(); - if next_selection.end.cmp(&selection.end, buffer).is_ge() { - selection.end = next_selection.end; - } - } else { - break; - } - } - Some(selection) - })); - buffer.set_active_selections(merged_selections, line_mode, cursor_shape, cx); + buffer.set_active_selections(selections.into(), line_mode, cursor_shape, cx); }); } } @@ -1821,200 +1762,31 @@ impl MultiBuffer { #[instrument(skip_all)] 
fn merge_excerpt_ranges<'a>( expanded_ranges: impl IntoIterator> + 'a, - ) -> (Vec>, Vec) { + ) -> Vec> { + let mut sorted: Vec<_> = expanded_ranges.into_iter().collect(); + sorted.sort_by_key(|range| range.context.start); let mut merged_ranges: Vec> = Vec::new(); - let mut counts: Vec = Vec::new(); - for range in expanded_ranges { + for range in sorted { if let Some(last_range) = merged_ranges.last_mut() { - assert!( - last_range.context.start <= range.context.start, - "ranges must be sorted: {last_range:?} <= {range:?}" - ); if last_range.context.end >= range.context.start || last_range.context.end.row + 1 == range.context.start.row { last_range.context.end = range.context.end.max(last_range.context.end); - *counts.last_mut().unwrap() += 1; continue; } } merged_ranges.push(range.clone()); - counts.push(1); - } - (merged_ranges, counts) - } - - pub fn insert_excerpts_after( - &mut self, - prev_excerpt_id: ExcerptId, - buffer: Entity, - ranges: impl IntoIterator>, - cx: &mut Context, - ) -> Vec - where - O: text::ToOffset, - { - let mut ids = Vec::new(); - let mut next_excerpt_id = - if let Some(last_entry) = self.snapshot.borrow().excerpt_ids.last() { - last_entry.id.0 + 1 - } else { - 1 - }; - self.insert_excerpts_with_ids_after( - prev_excerpt_id, - buffer, - ranges.into_iter().map(|range| { - let id = ExcerptId(post_inc(&mut next_excerpt_id)); - ids.push(id); - (id, range) - }), - cx, - ); - ids - } - - pub fn insert_excerpts_with_ids_after( - &mut self, - prev_excerpt_id: ExcerptId, - buffer: Entity, - ranges: impl IntoIterator)>, - cx: &mut Context, - ) where - O: text::ToOffset, - { - assert_eq!(self.history.transaction_depth(), 0); - let mut ranges = ranges.into_iter().peekable(); - if ranges.peek().is_none() { - return Default::default(); - } - - self.sync_mut(cx); - - let buffer_snapshot = buffer.read(cx).snapshot(); - let buffer_id = buffer_snapshot.remote_id(); - - let buffer_state = self.buffers.entry(buffer_id).or_insert_with(|| { - 
self.buffer_changed_since_sync.replace(true); - buffer.update(cx, |buffer, _| { - buffer.record_changes(Rc::downgrade(&self.buffer_changed_since_sync)); - }); - BufferState { - last_version: RefCell::new(buffer_snapshot.version().clone()), - last_non_text_state_update_count: Cell::new( - buffer_snapshot.non_text_state_update_count(), - ), - excerpts: Default::default(), - _subscriptions: [ - cx.observe(&buffer, |_, _, cx| cx.notify()), - cx.subscribe(&buffer, Self::on_buffer_event), - ], - buffer: buffer.clone(), - } - }); - - let mut snapshot = self.snapshot.get_mut(); - - let mut prev_locator = snapshot.excerpt_locator_for_id(prev_excerpt_id).clone(); - let mut new_excerpt_ids = mem::take(&mut snapshot.excerpt_ids); - let mut cursor = snapshot.excerpts.cursor::>(()); - let mut new_excerpts = cursor.slice(&prev_locator, Bias::Right); - prev_locator = cursor.start().unwrap_or(Locator::min_ref()).clone(); - - let edit_start = ExcerptDimension(new_excerpts.summary().text.len); - new_excerpts.update_last( - |excerpt| { - excerpt.has_trailing_newline = true; - }, - (), - ); - - let next_locator = if let Some(excerpt) = cursor.item() { - excerpt.locator.clone() - } else { - Locator::max() - }; - - let mut excerpts = Vec::new(); - let buffer_snapshot = Arc::new(buffer_snapshot); - while let Some((id, range)) = ranges.next() { - let locator = Locator::between(&prev_locator, &next_locator); - if let Err(ix) = buffer_state.excerpts.binary_search(&locator) { - buffer_state.excerpts.insert(ix, locator.clone()); - } - let range = ExcerptRange { - context: buffer_snapshot.anchor_before(&range.context.start) - ..buffer_snapshot.anchor_after(&range.context.end), - primary: buffer_snapshot.anchor_before(&range.primary.start) - ..buffer_snapshot.anchor_after(&range.primary.end), - }; - excerpts.push((id, range.clone())); - let excerpt = Excerpt::new( - id, - locator.clone(), - buffer_id, - buffer_snapshot.clone(), - range, - ranges.peek().is_some() || cursor.item().is_some(), - ); 
- new_excerpts.push(excerpt, ()); - prev_locator = locator.clone(); - - if let Some(last_mapping_entry) = new_excerpt_ids.last() { - assert!(id > last_mapping_entry.id, "excerpt ids must be increasing"); - } - new_excerpt_ids.push(ExcerptIdMapping { id, locator }, ()); - } - snapshot - .buffer_locators - .insert(buffer_id, buffer_state.excerpts.iter().cloned().collect()); - - let edit_end = ExcerptDimension(new_excerpts.summary().text.len); - - let suffix = cursor.suffix(); - let changed_trailing_excerpt = suffix.is_empty(); - new_excerpts.append(suffix, ()); - drop(cursor); - snapshot.excerpts = new_excerpts; - snapshot.excerpt_ids = new_excerpt_ids; - if changed_trailing_excerpt { - snapshot.trailing_excerpt_update_count += 1; - } - - let edits = Self::sync_diff_transforms( - &mut snapshot, - vec![Edit { - old: edit_start..edit_start, - new: edit_start..edit_end, - }], - DiffChangeKind::BufferEdited, - ); - if !edits.is_empty() { - self.subscriptions.publish(edits); } - - cx.emit(Event::Edited { - edited_buffer: None, - is_local: true, - }); - cx.emit(Event::ExcerptsAdded { - buffer, - predecessor: prev_excerpt_id, - excerpts, - }); - cx.notify(); + merged_ranges } pub fn clear(&mut self, cx: &mut Context) { self.sync_mut(cx); - let ids = self.excerpt_ids(); let removed_buffer_ids = std::mem::take(&mut self.buffers).into_keys().collect(); - self.excerpts_by_path.clear(); - self.paths_by_excerpt.clear(); + self.diffs.clear(); let MultiBufferSnapshot { excerpts, - buffer_locators, - diffs: _, + diffs, diff_transforms: _, non_text_state_update_count: _, edit_count: _, @@ -2023,27 +1795,25 @@ impl MultiBuffer { has_conflict, has_inverted_diff, singleton: _, - excerpt_ids: _, - replaced_excerpts, trailing_excerpt_update_count, all_diff_hunks_expanded: _, show_deleted_hunks: _, use_extended_diff_range: _, show_headers: _, + path_keys_by_index: _, + indices_by_path_key: _, + buffers, } = self.snapshot.get_mut(); - buffer_locators.clear(); let start = 
ExcerptDimension(MultiBufferOffset::ZERO); let prev_len = ExcerptDimension(excerpts.summary().text.len); *excerpts = Default::default(); + *buffers = Default::default(); + *diffs = Default::default(); *trailing_excerpt_update_count += 1; *is_dirty = false; *has_deleted_file = false; *has_conflict = false; *has_inverted_diff = false; - match Arc::get_mut(replaced_excerpts) { - Some(replaced_excerpts) => replaced_excerpts.clear(), - None => *replaced_excerpts = Default::default(), - } let edits = Self::sync_diff_transforms( self.snapshot.get_mut(), @@ -2060,120 +1830,10 @@ impl MultiBuffer { edited_buffer: None, is_local: true, }); - cx.emit(Event::ExcerptsRemoved { - ids, - removed_buffer_ids, - }); + cx.emit(Event::BuffersRemoved { removed_buffer_ids }); cx.notify(); } - #[ztracing::instrument(skip_all)] - pub fn excerpts_for_buffer( - &self, - buffer_id: BufferId, - cx: &App, - ) -> Vec<(ExcerptId, Arc, ExcerptRange)> { - let mut excerpts = Vec::new(); - let snapshot = self.read(cx); - let mut cursor = snapshot.excerpts.cursor::>(()); - if let Some(locators) = snapshot.buffer_locators.get(&buffer_id) { - for locator in &**locators { - cursor.seek_forward(&Some(locator), Bias::Left); - if let Some(excerpt) = cursor.item() - && excerpt.locator == *locator - { - excerpts.push((excerpt.id, excerpt.buffer.clone(), excerpt.range.clone())); - } - } - } - - excerpts - } - - pub fn excerpt_ranges_for_buffer(&self, buffer_id: BufferId, cx: &App) -> Vec> { - let snapshot = self.read(cx); - let mut excerpts = snapshot - .excerpts - .cursor::, ExcerptPoint>>(()); - let mut diff_transforms = snapshot - .diff_transforms - .cursor::>>(()); - diff_transforms.next(); - let locators = snapshot - .buffer_locators - .get(&buffer_id) - .into_iter() - .flat_map(|v| &**v); - let mut result = Vec::new(); - for locator in locators { - excerpts.seek_forward(&Some(locator), Bias::Left); - if let Some(excerpt) = excerpts.item() - && excerpt.locator == *locator - { - let excerpt_start = 
excerpts.start().1; - let excerpt_end = excerpt_start + excerpt.text_summary.lines; - - diff_transforms.seek_forward(&excerpt_start, Bias::Left); - let overshoot = excerpt_start - diff_transforms.start().0; - let start = diff_transforms.start().1 + overshoot; - - diff_transforms.seek_forward(&excerpt_end, Bias::Right); - let overshoot = excerpt_end - diff_transforms.start().0; - let end = diff_transforms.start().1 + overshoot; - - result.push(start.0..end.0) - } - } - result - } - - pub fn excerpt_buffer_ids(&self) -> Vec { - self.snapshot - .borrow() - .excerpts - .iter() - .map(|entry| entry.buffer_id) - .collect() - } - - pub fn excerpt_ids(&self) -> Vec { - let snapshot = self.snapshot.borrow(); - let mut ids = Vec::with_capacity(snapshot.excerpts.summary().count); - ids.extend(snapshot.excerpts.iter().map(|entry| entry.id)); - ids - } - - pub fn excerpt_containing( - &self, - position: impl ToOffset, - cx: &App, - ) -> Option<(ExcerptId, Entity, Range)> { - let snapshot = self.read(cx); - let offset = position.to_offset(&snapshot); - - let mut cursor = snapshot.cursor::(); - cursor.seek(&offset); - cursor - .excerpt() - .or_else(|| snapshot.excerpts.last()) - .map(|excerpt| { - ( - excerpt.id, - self.buffers.get(&excerpt.buffer_id).unwrap().buffer.clone(), - excerpt.range.context.clone(), - ) - }) - } - - pub fn buffer_for_anchor(&self, anchor: Anchor, cx: &App) -> Option> { - if let Some(buffer_id) = anchor.text_anchor.buffer_id { - self.buffer(buffer_id) - } else { - let (_, buffer, _) = self.excerpt_containing(anchor, cx)?; - Some(buffer) - } - } - // If point is at the end of the buffer, the last excerpt is returned pub fn point_to_buffer_offset( &self, @@ -2193,15 +1853,10 @@ impl MultiBuffer { &self, point: T, cx: &App, - ) -> Option<(Entity, Point, ExcerptId)> { + ) -> Option<(Entity, Point)> { let snapshot = self.read(cx); - let (buffer, point, is_main_buffer) = - snapshot.point_to_buffer_point(point.to_point(&snapshot))?; - Some(( - 
self.buffers.get(&buffer.remote_id())?.buffer.clone(), - point, - is_main_buffer, - )) + let (buffer, point) = snapshot.point_to_buffer_point(point.to_point(&snapshot))?; + Some((self.buffers.get(&buffer.remote_id())?.buffer.clone(), point)) } pub fn buffer_point_to_anchor( @@ -2212,263 +1867,83 @@ impl MultiBuffer { cx: &App, ) -> Option { let mut found = None; - let snapshot = buffer.read(cx).snapshot(); - for (excerpt_id, _, range) in self.excerpts_for_buffer(snapshot.remote_id(), cx) { - let start = range.context.start.to_point(&snapshot); - let end = range.context.end.to_point(&snapshot); - if start <= point && point < end { - found = Some((snapshot.clip_point(point, Bias::Left), excerpt_id)); + let buffer_snapshot = buffer.read(cx).snapshot(); + let text_anchor = buffer_snapshot.anchor_after(&point); + let snapshot = self.snapshot(cx); + let path_key_index = snapshot.path_key_index_for_buffer(buffer_snapshot.remote_id())?; + for excerpt in snapshot.excerpts_for_buffer(buffer_snapshot.remote_id()) { + if excerpt + .context + .start + .cmp(&text_anchor, &buffer_snapshot) + .is_gt() + { + found = Some(Anchor::in_buffer(path_key_index, excerpt.context.start)); + break; + } else if excerpt + .context + .end + .cmp(&text_anchor, &buffer_snapshot) + .is_ge() + { + found = Some(Anchor::in_buffer(path_key_index, text_anchor)); break; } - if point < start { - found = Some((start, excerpt_id)); - } - if point >= end { - found = Some((end, excerpt_id)); - } + found = Some(Anchor::in_buffer(path_key_index, excerpt.context.end)); } - found.map(|(point, excerpt_id)| { - let text_anchor = snapshot.anchor_after(point); - Anchor::in_buffer(excerpt_id, text_anchor) - }) + found } - pub fn buffer_anchor_to_anchor( + pub fn wait_for_anchors<'a, Anchors: 'a + Iterator>( &self, - // todo(lw): We shouldn't need this? 
- buffer: &Entity, - anchor: text::Anchor, - cx: &App, - ) -> Option { - let snapshot = buffer.read(cx).snapshot(); - for (excerpt_id, _, range) in self.excerpts_for_buffer(snapshot.remote_id(), cx) { - if range.context.start.cmp(&anchor, &snapshot).is_le() - && range.context.end.cmp(&anchor, &snapshot).is_ge() - { - return Some(Anchor::in_buffer(excerpt_id, anchor)); + anchors: Anchors, + cx: &mut Context, + ) -> impl 'static + Future> + use { + let mut error = None; + let mut futures = Vec::new(); + for anchor in anchors { + if let Some(excerpt_anchor) = anchor.excerpt_anchor() { + if let Some(buffer) = self.buffers.get(&excerpt_anchor.text_anchor.buffer_id) { + buffer.buffer.update(cx, |buffer, _| { + futures.push(buffer.wait_for_anchors([excerpt_anchor.text_anchor()])) + }); + } else { + error = Some(anyhow!( + "buffer {:?} is not part of this multi-buffer", + excerpt_anchor.text_anchor.buffer_id + )); + break; + } + } + } + async move { + if let Some(error) = error { + Err(error)?; + } + for future in futures { + future.await?; } + Ok(()) } + } - None + pub fn text_anchor_for_position( + &self, + position: T, + cx: &App, + ) -> Option<(Entity, text::Anchor)> { + let snapshot = self.read(cx); + let anchor = snapshot.anchor_before(position).excerpt_anchor()?; + let buffer = self + .buffers + .get(&anchor.text_anchor.buffer_id)? 
+ .buffer + .clone(); + Some((buffer, anchor.text_anchor())) } - pub fn merge_excerpts( - &mut self, - excerpt_ids: &[ExcerptId], - cx: &mut Context, - ) -> ExcerptId { - debug_assert!(!excerpt_ids.is_empty()); - if excerpt_ids.len() == 1 { - return excerpt_ids[0]; - } - - let snapshot = self.snapshot(cx); - - let first_range = snapshot - .context_range_for_excerpt(excerpt_ids[0]) - .expect("first excerpt must exist"); - let last_range = snapshot - .context_range_for_excerpt(*excerpt_ids.last().unwrap()) - .expect("last excerpt must exist"); - - let union_range = first_range.start..last_range.end; - - drop(snapshot); - - self.resize_excerpt(excerpt_ids[0], union_range, cx); - let removed = &excerpt_ids[1..]; - for &excerpt_id in removed { - if let Some(path) = self.paths_by_excerpt.get(&excerpt_id) { - if let Some(excerpt_list) = self.excerpts_by_path.get_mut(path) { - excerpt_list.retain(|id| *id != excerpt_id); - if excerpt_list.is_empty() { - let path = path.clone(); - self.excerpts_by_path.remove(&path); - } - } - } - } - self.remove_excerpts(removed.iter().copied(), cx); - - excerpt_ids[0] - } - - pub fn remove_excerpts( - &mut self, - excerpt_ids: impl IntoIterator, - cx: &mut Context, - ) { - self.sync_mut(cx); - let ids = excerpt_ids.into_iter().collect::>(); - if ids.is_empty() { - return; - } - self.buffer_changed_since_sync.replace(true); - - let mut snapshot = self.snapshot.get_mut(); - let mut new_excerpts = SumTree::default(); - let mut cursor = snapshot - .excerpts - .cursor::, ExcerptOffset>>(()); - let mut edits = Vec::new(); - let mut excerpt_ids = ids.iter().copied().peekable(); - let mut removed_buffer_ids = Vec::new(); - let mut removed_excerpts_for_buffers = HashSet::default(); - - while let Some(excerpt_id) = excerpt_ids.next() { - self.paths_by_excerpt.remove(&excerpt_id); - // Seek to the next excerpt to remove, preserving any preceding excerpts. 
- let locator = snapshot.excerpt_locator_for_id(excerpt_id); - new_excerpts.append(cursor.slice(&Some(locator), Bias::Left), ()); - - if let Some(mut excerpt) = cursor.item() { - if excerpt.id != excerpt_id { - continue; - } - let mut old_start = cursor.start().1; - - // Skip over the removed excerpt. - 'remove_excerpts: loop { - if let Some(buffer_state) = self.buffers.get_mut(&excerpt.buffer_id) { - removed_excerpts_for_buffers.insert(excerpt.buffer_id); - buffer_state.excerpts.retain(|l| l != &excerpt.locator); - if buffer_state.excerpts.is_empty() { - log::debug!( - "removing buffer and diff for buffer {}", - excerpt.buffer_id - ); - self.buffers.remove(&excerpt.buffer_id); - removed_buffer_ids.push(excerpt.buffer_id); - } - } - cursor.next(); - - // Skip over any subsequent excerpts that are also removed. - if let Some(&next_excerpt_id) = excerpt_ids.peek() { - let next_locator = snapshot.excerpt_locator_for_id(next_excerpt_id); - if let Some(next_excerpt) = cursor.item() - && next_excerpt.locator == *next_locator - { - excerpt_ids.next(); - excerpt = next_excerpt; - continue 'remove_excerpts; - } - } - - break; - } - - // When removing the last excerpt, remove the trailing newline from - // the previous excerpt. - if cursor.item().is_none() && old_start > MultiBufferOffset::ZERO { - old_start -= 1; - new_excerpts.update_last(|e| e.has_trailing_newline = false, ()); - } - - // Push an edit for the removal of this run of excerpts. 
- let old_end = cursor.start().1; - let new_start = ExcerptDimension(new_excerpts.summary().text.len); - edits.push(Edit { - old: old_start..old_end, - new: new_start..new_start, - }); - } - } - let suffix = cursor.suffix(); - let changed_trailing_excerpt = suffix.is_empty(); - new_excerpts.append(suffix, ()); - drop(cursor); - for buffer_id in removed_excerpts_for_buffers { - match self.buffers.get(&buffer_id) { - Some(buffer_state) => { - snapshot - .buffer_locators - .insert(buffer_id, buffer_state.excerpts.iter().cloned().collect()); - } - None => { - snapshot.buffer_locators.remove(&buffer_id); - } - } - } - snapshot.excerpts = new_excerpts; - for buffer_id in &removed_buffer_ids { - self.diffs.remove(buffer_id); - remove_diff_state(&mut snapshot.diffs, *buffer_id); - } - - if !removed_buffer_ids.is_empty() { - snapshot.has_inverted_diff = - snapshot.diffs.iter().any(|diff| diff.main_buffer.is_some()); - } - - if changed_trailing_excerpt { - snapshot.trailing_excerpt_update_count += 1; - } - - let edits = Self::sync_diff_transforms(&mut snapshot, edits, DiffChangeKind::BufferEdited); - if !edits.is_empty() { - self.subscriptions.publish(edits); - } - cx.emit(Event::Edited { - edited_buffer: None, - is_local: true, - }); - cx.emit(Event::ExcerptsRemoved { - ids, - removed_buffer_ids, - }); - cx.notify(); - } - - pub fn wait_for_anchors<'a, Anchors: 'a + Iterator>( - &self, - anchors: Anchors, - cx: &mut Context, - ) -> impl 'static + Future> + use { - let mut error = None; - let mut futures = Vec::new(); - for anchor in anchors { - if let Some(buffer_id) = anchor.text_anchor.buffer_id { - if let Some(buffer) = self.buffers.get(&buffer_id) { - buffer.buffer.update(cx, |buffer, _| { - futures.push(buffer.wait_for_anchors([anchor.text_anchor])) - }); - } else { - error = Some(anyhow!( - "buffer {buffer_id} is not part of this multi-buffer" - )); - break; - } - } - } - async move { - if let Some(error) = error { - Err(error)?; - } - for future in futures { - 
future.await?; - } - Ok(()) - } - } - - pub fn text_anchor_for_position( - &self, - position: T, - cx: &App, - ) -> Option<(Entity, language::Anchor)> { - let snapshot = self.read(cx); - let anchor = snapshot.anchor_before(position); - let buffer = self - .buffers - .get(&anchor.text_anchor.buffer_id?)? - .buffer - .clone(); - Some((buffer, anchor.text_anchor)) - } - - fn on_buffer_event( + fn on_buffer_event( &mut self, buffer: Entity, event: &language::BufferEvent, @@ -2532,12 +2007,15 @@ impl MultiBuffer { range: Range, cx: &mut Context, ) { - self.sync_mut(cx); + let Some(buffer) = self.buffer(diff.read(cx).buffer_id) else { + return; + }; + let snapshot = self.sync_mut(cx); let diff = diff.read(cx); let buffer_id = diff.buffer_id; - let Some(buffer_state) = self.buffers.get(&buffer_id) else { + let Some(path) = snapshot.path_for_buffer(buffer_id).cloned() else { return; }; let new_diff = DiffStateSnapshot { @@ -2545,17 +2023,17 @@ impl MultiBuffer { diff: diff.snapshot(cx), main_buffer: None, }; - let mut snapshot = self.snapshot.get_mut(); + let snapshot = self.snapshot.get_mut(); let base_text_changed = find_diff_state(&snapshot.diffs, buffer_id) .is_none_or(|old_diff| !new_diff.base_texts_definitely_eq(old_diff)); snapshot.diffs.insert_or_replace(new_diff, ()); - let buffer = buffer_state.buffer.read(cx); + let buffer = buffer.read(cx); let diff_change_range = range.to_offset(buffer); - let excerpt_edits = snapshot.excerpt_edits_for_diff_change(buffer_state, diff_change_range); + let excerpt_edits = snapshot.excerpt_edits_for_diff_change(&path, diff_change_range); let edits = Self::sync_diff_transforms( - &mut snapshot, + snapshot, excerpt_edits, DiffChangeKind::DiffUpdated { base_changed: base_text_changed, @@ -2577,10 +2055,10 @@ impl MultiBuffer { diff_change_range: Option>, cx: &mut Context, ) { - self.sync_mut(cx); + let snapshot = self.sync_mut(cx); let base_text_buffer_id = diff.read(cx).base_text_buffer().read(cx).remote_id(); - let 
Some(buffer_state) = self.buffers.get(&base_text_buffer_id) else { + let Some(path) = snapshot.path_for_buffer(base_text_buffer_id).cloned() else { return; }; @@ -2591,16 +2069,16 @@ impl MultiBuffer { diff: diff.snapshot(cx), main_buffer: Some(main_buffer_snapshot), }; - let mut snapshot = self.snapshot.get_mut(); + let snapshot = self.snapshot.get_mut(); snapshot.diffs.insert_or_replace(new_diff, ()); let Some(diff_change_range) = diff_change_range else { return; }; - let excerpt_edits = snapshot.excerpt_edits_for_diff_change(buffer_state, diff_change_range); + let excerpt_edits = snapshot.excerpt_edits_for_diff_change(&path, diff_change_range); let edits = Self::sync_diff_transforms( - &mut snapshot, + snapshot, excerpt_edits, DiffChangeKind::DiffUpdated { // We don't read this field for inverted diffs. @@ -2624,14 +2102,6 @@ impl MultiBuffer { self.all_buffers_iter().collect() } - pub fn all_buffer_ids_iter(&self) -> impl Iterator { - self.buffers.keys().copied() - } - - pub fn all_buffer_ids(&self) -> Vec { - self.all_buffer_ids_iter().collect() - } - pub fn buffer(&self, buffer_id: BufferId) -> Option> { self.buffers .get(&buffer_id) @@ -2644,14 +2114,11 @@ impl MultiBuffer { } pub fn language_settings<'a>(&'a self, cx: &'a App) -> Cow<'a, LanguageSettings> { - let buffer_id = self - .snapshot - .borrow() + let snapshot = self.snapshot(cx); + snapshot .excerpts .first() - .map(|excerpt| excerpt.buffer.remote_id()); - buffer_id - .and_then(|buffer_id| self.buffer(buffer_id)) + .and_then(|excerpt| self.buffer(excerpt.range.context.start.buffer_id)) .map(|buffer| LanguageSettings::for_buffer(&buffer.read(cx), cx)) .unwrap_or_else(move || self.language_settings_at(MultiBufferOffset::default(), cx)) } @@ -2814,7 +2281,7 @@ impl MultiBuffer { pub fn set_all_diff_hunks_expanded(&mut self, cx: &mut Context) { self.snapshot.get_mut().all_diff_hunks_expanded = true; - self.expand_or_collapse_diff_hunks(vec![Anchor::min()..Anchor::max()], true, cx); + 
self.expand_or_collapse_diff_hunks(vec![Anchor::Min..Anchor::Max], true, cx); } pub fn all_diff_hunks_expanded(&self) -> bool { @@ -2823,7 +2290,7 @@ impl MultiBuffer { pub fn set_all_diff_hunks_collapsed(&mut self, cx: &mut Context) { self.snapshot.get_mut().all_diff_hunks_expanded = false; - self.expand_or_collapse_diff_hunks(vec![Anchor::min()..Anchor::max()], false, cx); + self.expand_or_collapse_diff_hunks(vec![Anchor::Min..Anchor::Max], false, cx); } pub fn set_show_deleted_hunks(&mut self, show: bool, cx: &mut Context) { @@ -2833,7 +2300,7 @@ impl MultiBuffer { let old_len = self.snapshot.borrow().len(); - let ranges = std::iter::once((Point::zero()..Point::MAX, ExcerptId::max())); + let ranges = std::iter::once((Point::zero()..Point::MAX, None)); let _ = self.expand_or_collapse_diff_hunks_inner(ranges, true, cx); let new_len = self.snapshot.borrow().len(); @@ -2856,7 +2323,7 @@ impl MultiBuffer { pub fn has_multiple_hunks(&self, cx: &App) -> bool { self.read(cx) - .diff_hunks_in_range(Anchor::min()..Anchor::max()) + .diff_hunks_in_range(Anchor::Min..Anchor::Max) .nth(1) .is_some() } @@ -2902,7 +2369,7 @@ impl MultiBuffer { pub fn expand_or_collapse_diff_hunks_inner( &mut self, - ranges: impl IntoIterator, ExcerptId)>, + ranges: impl IntoIterator, Option)>, expand: bool, cx: &mut Context, ) -> Vec> { @@ -2913,18 +2380,34 @@ impl MultiBuffer { let mut snapshot = self.snapshot.get_mut(); let mut excerpt_edits = Vec::new(); let mut last_hunk_row = None; - for (range, end_excerpt_id) in ranges { + for (range, end_anchor) in ranges { for diff_hunk in snapshot.diff_hunks_in_range(range) { - if diff_hunk.excerpt_id.cmp(&end_excerpt_id, &snapshot).is_gt() { + if let Some(end_anchor) = &end_anchor + && let Some(hunk_end_anchor) = + snapshot.anchor_in_excerpt(diff_hunk.excerpt_range.context.end) + && hunk_end_anchor.cmp(end_anchor, snapshot).is_gt() + { + continue; + } + let hunk_range = diff_hunk.multi_buffer_range; + if let Some(excerpt_start_anchor) = + 
snapshot.anchor_in_excerpt(diff_hunk.excerpt_range.context.start) + && hunk_range.start.to_point(snapshot) < excerpt_start_anchor.to_point(snapshot) + { continue; } if last_hunk_row.is_some_and(|row| row >= diff_hunk.row_range.start) { continue; } - let start = Anchor::in_buffer(diff_hunk.excerpt_id, diff_hunk.buffer_range.start); - let end = Anchor::in_buffer(diff_hunk.excerpt_id, diff_hunk.buffer_range.end); - let start = snapshot.excerpt_offset_for_anchor(&start); - let end = snapshot.excerpt_offset_for_anchor(&end); + let mut start = snapshot.excerpt_offset_for_anchor(&hunk_range.start); + let mut end = snapshot.excerpt_offset_for_anchor(&hunk_range.end); + if let Some(excerpt_end_anchor) = + snapshot.anchor_in_excerpt(diff_hunk.excerpt_range.context.end) + { + let excerpt_end = snapshot.excerpt_offset_for_anchor(&excerpt_end_anchor); + start = start.min(excerpt_end); + end = end.min(excerpt_end); + }; last_hunk_row = Some(diff_hunk.row_range.start); excerpt_edits.push(text::Edit { old: start..end, @@ -2947,15 +2430,18 @@ impl MultiBuffer { cx: &mut Context, ) { let snapshot = self.snapshot.borrow().clone(); - let ranges = ranges.iter().map(move |range| { - let end_excerpt_id = range.end.excerpt_id; - let range = range.to_point(&snapshot); - let mut peek_end = range.end; - if range.end.row < snapshot.max_row().0 { - peek_end = Point::new(range.end.row + 1, 0); - }; - (range.start..peek_end, end_excerpt_id) - }); + let ranges = + ranges.iter().map(move |range| { + let excerpt_end = snapshot.excerpt_containing(range.end..range.end).and_then( + |(_, excerpt_range)| snapshot.anchor_in_excerpt(excerpt_range.context.end), + ); + let range = range.to_point(&snapshot); + let mut peek_end = range.end; + if range.end.row < snapshot.max_row().0 { + peek_end = Point::new(range.end.row + 1, 0); + }; + (range.start..peek_end, excerpt_end) + }); let edits = self.expand_or_collapse_diff_hunks_inner(ranges, expand, cx); if !edits.is_empty() { self.subscriptions.publish(edits); 
@@ -2967,184 +2453,6 @@ impl MultiBuffer { }); } - pub fn resize_excerpt( - &mut self, - id: ExcerptId, - range: Range, - cx: &mut Context, - ) { - self.sync_mut(cx); - - let mut snapshot = self.snapshot.get_mut(); - let locator = snapshot.excerpt_locator_for_id(id); - let mut new_excerpts = SumTree::default(); - let mut cursor = snapshot - .excerpts - .cursor::, ExcerptOffset>>(()); - let mut edits = Vec::>::new(); - - let prefix = cursor.slice(&Some(locator), Bias::Left); - new_excerpts.append(prefix, ()); - - let mut excerpt = cursor.item().unwrap().clone(); - let old_text_len = excerpt.text_summary.len; - - excerpt.range.context.start = range.start; - excerpt.range.context.end = range.end; - excerpt.max_buffer_row = range.end.to_point(&excerpt.buffer).row; - - excerpt.text_summary = excerpt - .buffer - .text_summary_for_range(excerpt.range.context.clone()); - - let new_start_offset = ExcerptDimension(new_excerpts.summary().text.len); - let old_start_offset = cursor.start().1; - let new_text_len = excerpt.text_summary.len; - let edit = Edit { - old: old_start_offset..old_start_offset + old_text_len, - new: new_start_offset..new_start_offset + new_text_len, - }; - - if let Some(last_edit) = edits.last_mut() { - if last_edit.old.end == edit.old.start { - last_edit.old.end = edit.old.end; - last_edit.new.end = edit.new.end; - } else { - edits.push(edit); - } - } else { - edits.push(edit); - } - - new_excerpts.push(excerpt, ()); - - cursor.next(); - - new_excerpts.append(cursor.suffix(), ()); - - drop(cursor); - snapshot.excerpts = new_excerpts; - - let edits = Self::sync_diff_transforms(&mut snapshot, edits, DiffChangeKind::BufferEdited); - if !edits.is_empty() { - self.subscriptions.publish(edits); - } - cx.emit(Event::Edited { - edited_buffer: None, - is_local: true, - }); - cx.emit(Event::ExcerptsExpanded { ids: vec![id] }); - cx.notify(); - } - - pub fn expand_excerpts( - &mut self, - ids: impl IntoIterator, - line_count: u32, - direction: 
ExpandExcerptDirection, - cx: &mut Context, - ) { - if line_count == 0 { - return; - } - self.sync_mut(cx); - if !self.excerpts_by_path.is_empty() { - self.expand_excerpts_with_paths(ids, line_count, direction, cx); - return; - } - let mut snapshot = self.snapshot.get_mut(); - - let ids = ids.into_iter().collect::>(); - let locators = snapshot.excerpt_locators_for_ids(ids.iter().copied()); - let mut new_excerpts = SumTree::default(); - let mut cursor = snapshot - .excerpts - .cursor::, ExcerptOffset>>(()); - let mut edits = Vec::>::new(); - - for locator in &locators { - let prefix = cursor.slice(&Some(locator), Bias::Left); - new_excerpts.append(prefix, ()); - - let mut excerpt = cursor.item().unwrap().clone(); - let old_text_len = excerpt.text_summary.len; - - let up_line_count = if direction.should_expand_up() { - line_count - } else { - 0 - }; - - let start_row = excerpt - .range - .context - .start - .to_point(&excerpt.buffer) - .row - .saturating_sub(up_line_count); - let start_point = Point::new(start_row, 0); - excerpt.range.context.start = excerpt.buffer.anchor_before(start_point); - - let down_line_count = if direction.should_expand_down() { - line_count - } else { - 0 - }; - - let mut end_point = excerpt.buffer.clip_point( - excerpt.range.context.end.to_point(&excerpt.buffer) - + Point::new(down_line_count, 0), - Bias::Left, - ); - end_point.column = excerpt.buffer.line_len(end_point.row); - excerpt.range.context.end = excerpt.buffer.anchor_after(end_point); - excerpt.max_buffer_row = end_point.row; - - excerpt.text_summary = excerpt - .buffer - .text_summary_for_range(excerpt.range.context.clone()); - - let new_start_offset = ExcerptDimension(new_excerpts.summary().text.len); - let old_start_offset = cursor.start().1; - let new_text_len = excerpt.text_summary.len; - let edit = Edit { - old: old_start_offset..old_start_offset + old_text_len, - new: new_start_offset..new_start_offset + new_text_len, - }; - - if let Some(last_edit) = edits.last_mut() { - 
if last_edit.old.end == edit.old.start { - last_edit.old.end = edit.old.end; - last_edit.new.end = edit.new.end; - } else { - edits.push(edit); - } - } else { - edits.push(edit); - } - - new_excerpts.push(excerpt, ()); - - cursor.next(); - } - - new_excerpts.append(cursor.suffix(), ()); - - drop(cursor); - snapshot.excerpts = new_excerpts; - - let edits = Self::sync_diff_transforms(&mut snapshot, edits, DiffChangeKind::BufferEdited); - if !edits.is_empty() { - self.subscriptions.publish(edits); - } - cx.emit(Event::Edited { - edited_buffer: None, - is_local: true, - }); - cx.emit(Event::ExcerptsExpanded { ids }); - cx.notify(); - } - #[ztracing::instrument(skip_all)] fn sync(&self, cx: &App) { let changed = self.buffer_changed_since_sync.replace(false); @@ -3162,17 +2470,19 @@ impl MultiBuffer { } } - fn sync_mut(&mut self, cx: &App) { + fn sync_mut(&mut self, cx: &App) -> &mut MultiBufferSnapshot { + let snapshot = self.snapshot.get_mut(); let changed = self.buffer_changed_since_sync.replace(false); if !changed { - return; + return snapshot; } - let edits = - Self::sync_from_buffer_changes(self.snapshot.get_mut(), &self.buffers, &self.diffs, cx); + let edits = Self::sync_from_buffer_changes(snapshot, &self.buffers, &self.diffs, cx); if !edits.is_empty() { self.subscriptions.publish(edits); } + + snapshot } fn sync_from_buffer_changes( @@ -3183,8 +2493,10 @@ impl MultiBuffer { ) -> Vec> { let MultiBufferSnapshot { excerpts, - buffer_locators: _, diffs: buffer_diff, + buffers: buffer_snapshots, + path_keys_by_index: _, + indices_by_path_key: _, diff_transforms: _, non_text_state_update_count, edit_count, @@ -3193,8 +2505,6 @@ impl MultiBuffer { has_conflict, has_inverted_diff: _, singleton: _, - excerpt_ids: _, - replaced_excerpts: _, trailing_excerpt_update_count: _, all_diff_hunks_expanded: _, show_deleted_hunks: _, @@ -3227,28 +2537,32 @@ impl MultiBuffer { buffer_diff.edit(diffs_to_add, ()); } - let mut excerpts_to_edit = Vec::new(); + let mut paths_to_edit = 
Vec::new(); let mut non_text_state_updated = false; let mut edited = false; for buffer_state in buffers.values() { let buffer = buffer_state.buffer.read(cx); - let version = buffer.version(); + let last_snapshot = buffer_snapshots + .get(&buffer.remote_id()) + .expect("each buffer should have a snapshot"); + let current_version = buffer.version(); let non_text_state_update_count = buffer.non_text_state_update_count(); - let buffer_edited = version.changed_since(&buffer_state.last_version.borrow()); - let buffer_non_text_state_updated = - non_text_state_update_count > buffer_state.last_non_text_state_update_count.get(); + let buffer_edited = + current_version.changed_since(last_snapshot.buffer_snapshot.version()); + let buffer_non_text_state_updated = non_text_state_update_count + > last_snapshot.buffer_snapshot.non_text_state_update_count(); if buffer_edited || buffer_non_text_state_updated { - *buffer_state.last_version.borrow_mut() = version; - buffer_state - .last_non_text_state_update_count - .set(non_text_state_update_count); - excerpts_to_edit.extend( - buffer_state - .excerpts - .iter() - .map(|locator| (locator, buffer_state.buffer.clone(), buffer_edited)), - ); + paths_to_edit.push(( + last_snapshot.path_key.clone(), + last_snapshot.path_key_index, + buffer_state.buffer.clone(), + if buffer_edited { + Some(last_snapshot.buffer_snapshot.version().clone()) + } else { + None + }, + )); } edited |= buffer_edited; @@ -3266,55 +2580,64 @@ impl MultiBuffer { *non_text_state_update_count += 1; } - excerpts_to_edit.sort_unstable_by_key(|&(locator, _, _)| locator); + paths_to_edit.sort_unstable_by_key(|(path, _, _, _)| path.clone()); let mut edits = Vec::new(); let mut new_excerpts = SumTree::default(); - let mut cursor = excerpts.cursor::, ExcerptOffset>>(()); + let mut cursor = excerpts.cursor::(()); - for (locator, buffer, buffer_edited) in excerpts_to_edit { - new_excerpts.append(cursor.slice(&Some(locator), Bias::Left), ()); - let old_excerpt = 
cursor.item().unwrap(); + for (path, path_key_index, buffer, prev_version) in paths_to_edit { + new_excerpts.append(cursor.slice(&path, Bias::Left), ()); let buffer = buffer.read(cx); let buffer_id = buffer.remote_id(); - let mut new_excerpt; - if buffer_edited { - edits.extend( - buffer - .edits_since_in_range::( - old_excerpt.buffer.version(), - old_excerpt.range.context.clone(), - ) - .map(|edit| { - let excerpt_old_start = cursor.start().1; - let excerpt_new_start = - ExcerptDimension(new_excerpts.summary().text.len); - let old_start = excerpt_old_start + edit.old.start; - let old_end = excerpt_old_start + edit.old.end; - let new_start = excerpt_new_start + edit.new.start; - let new_end = excerpt_new_start + edit.new.end; - Edit { - old: old_start..old_end, - new: new_start..new_end, - } - }), - ); - new_excerpt = Excerpt::new( - old_excerpt.id, - locator.clone(), - buffer_id, - Arc::new(buffer.snapshot()), - old_excerpt.range.clone(), - old_excerpt.has_trailing_newline, - ); - } else { - new_excerpt = old_excerpt.clone(); - new_excerpt.buffer = Arc::new(buffer.snapshot()); - } + buffer_snapshots.insert( + buffer_id, + BufferStateSnapshot { + path_key: path.clone(), + path_key_index, + buffer_snapshot: buffer.snapshot(), + }, + ); - new_excerpts.push(new_excerpt, ()); - cursor.next(); + if let Some(prev_version) = &prev_version { + while let Some(old_excerpt) = cursor.item() + && &old_excerpt.path_key == &path + { + edits.extend( + buffer + .edits_since_in_range::( + prev_version, + old_excerpt.range.context.clone(), + ) + .map(|edit| { + let excerpt_old_start = cursor.start().len(); + let excerpt_new_start = + ExcerptDimension(new_excerpts.summary().text.len); + let old_start = excerpt_old_start + edit.old.start; + let old_end = excerpt_old_start + edit.old.end; + let new_start = excerpt_new_start + edit.new.start; + let new_end = excerpt_new_start + edit.new.end; + Edit { + old: old_start..old_end, + new: new_start..new_end, + } + }), + ); + + let excerpt = 
Excerpt::new( + old_excerpt.path_key.clone(), + old_excerpt.path_key_index, + &buffer.snapshot(), + old_excerpt.range.clone(), + old_excerpt.has_trailing_newline, + ); + new_excerpts.push(excerpt, ()); + cursor.next(); + } + } else { + new_excerpts.append(cursor.slice(&path, Bias::Right), ()); + }; } new_excerpts.append(cursor.suffix(), ()); @@ -3416,7 +2739,8 @@ impl MultiBuffer { { return true; } - hunk.hunk_start_anchor.is_valid(&excerpt.buffer) + hunk.hunk_start_anchor + .is_valid(&excerpt.buffer_snapshot(&snapshot)) }), _ => true, }; @@ -3513,10 +2837,10 @@ impl MultiBuffer { // Recompute the expanded hunks in the portion of the excerpt that // intersects the edit. if let Some(diff) = find_diff_state(&snapshot.diffs, excerpt.buffer_id) { - let buffer = &excerpt.buffer; + let buffer_snapshot = &excerpt.buffer_snapshot(&snapshot); let excerpt_start = *excerpts.start(); let excerpt_end = excerpt_start + excerpt.text_summary.len; - let excerpt_buffer_start = excerpt.range.context.start.to_offset(buffer); + let excerpt_buffer_start = excerpt.range.context.start.to_offset(buffer_snapshot); let excerpt_buffer_end = excerpt_buffer_start + excerpt.text_summary.len; let edit_buffer_start = excerpt_buffer_start + edit.new.start.saturating_sub(excerpt_start); @@ -3535,7 +2859,6 @@ impl MultiBuffer { log::trace!("skipping hunk that starts before excerpt"); continue; } - hunk_buffer_range.end.to_point(&excerpt.buffer); let hunk_excerpt_start = excerpt_start + hunk_buffer_range.start.saturating_sub(excerpt_buffer_start); let hunk_excerpt_end = excerpt_end @@ -3548,9 +2871,10 @@ impl MultiBuffer { ); if !hunk_buffer_range.is_empty() { let hunk_info = DiffTransformHunkInfo { - excerpt_id: excerpt.id, + buffer_id: buffer_snapshot.remote_id(), hunk_start_anchor: hunk.buffer_range.start, hunk_secondary_status: hunk.secondary_status, + excerpt_end: excerpt.end_anchor(), is_logically_deleted: true, }; *end_of_current_insert = @@ -3558,23 +2882,24 @@ impl MultiBuffer { } } } else { 
- let edit_anchor_range = buffer.anchor_before(edit_buffer_start) - ..buffer.anchor_after(edit_buffer_end); - for hunk in diff.hunks_intersecting_range(edit_anchor_range, buffer) { + let edit_anchor_range = buffer_snapshot.anchor_before(edit_buffer_start) + ..buffer_snapshot.anchor_after(edit_buffer_end); + for hunk in diff.hunks_intersecting_range(edit_anchor_range, buffer_snapshot) { if hunk.is_created_file() && !all_diff_hunks_expanded { continue; } - let hunk_buffer_range = hunk.buffer_range.to_offset(buffer); + let hunk_buffer_range = hunk.buffer_range.to_offset(buffer_snapshot); if hunk_buffer_range.start < excerpt_buffer_start { log::trace!("skipping hunk that starts before excerpt"); continue; } let hunk_info = DiffTransformHunkInfo { - excerpt_id: excerpt.id, + buffer_id: buffer_snapshot.remote_id(), hunk_start_anchor: hunk.buffer_range.start, hunk_secondary_status: hunk.secondary_status, + excerpt_end: excerpt.end_anchor(), is_logically_deleted: false, }; @@ -3599,7 +2924,7 @@ impl MultiBuffer { } DiffChangeKind::ExpandOrCollapseHunks { expand } => { let intersects = hunk_buffer_range.is_empty() - || hunk_buffer_range.end > edit_buffer_start; + || (hunk_buffer_range.end > edit_buffer_start); if *expand { intersects || was_previously_expanded || all_diff_hunks_expanded } else { @@ -3613,9 +2938,8 @@ impl MultiBuffer { if should_expand_hunk { did_expand_hunks = true; log::trace!( - "expanding hunk {:?}, excerpt:{:?}", + "expanding hunk {:?}", hunk_excerpt_start..hunk_excerpt_end, - excerpt.id ); if !hunk.diff_base_byte_range.is_empty() @@ -3639,7 +2963,7 @@ impl MultiBuffer { DiffTransform::DeletedHunk { base_text_byte_range: hunk.diff_base_byte_range.clone(), summary: base_text_summary, - buffer_id: excerpt.buffer_id, + buffer_id: buffer_snapshot.remote_id(), hunk_info, has_trailing_newline, }, @@ -3766,11 +3090,13 @@ impl MultiBuffer { pub fn toggle_single_diff_hunk(&mut self, range: Range, cx: &mut Context) { let snapshot = self.snapshot(cx); - let 
excerpt_id = range.end.excerpt_id; + let excerpt_end = snapshot + .excerpt_containing(range.end..range.end) + .and_then(|(_, excerpt_range)| snapshot.anchor_in_excerpt(excerpt_range.context.end)); let point_range = range.to_point(&snapshot); let expand = !self.single_hunk_is_expanded(range, cx); let edits = - self.expand_or_collapse_diff_hunks_inner([(point_range, excerpt_id)], expand, cx); + self.expand_or_collapse_diff_hunks_inner([(point_range, excerpt_end)], expand, cx); if !edits.is_empty() { self.subscriptions.publish(edits); } @@ -3896,38 +3222,15 @@ impl MultiBuffer { use std::env; use util::RandomCharIter; - let max_excerpts = env::var("MAX_EXCERPTS") + let max_buffers = env::var("MAX_BUFFERS") .map(|i| i.parse().expect("invalid `MAX_EXCERPTS` variable")) .unwrap_or(5); let mut buffers = Vec::new(); for _ in 0..mutation_count { - if rng.random_bool(0.05) { - log::info!("Clearing multi-buffer"); - self.clear(cx); - continue; - } else if rng.random_bool(0.1) && !self.excerpt_ids().is_empty() { - let ids = self.excerpt_ids(); - let mut excerpts = HashSet::default(); - for _ in 0..rng.random_range(0..ids.len()) { - excerpts.extend(ids.choose(rng).copied()); - } - - let line_count = rng.random_range(0..5); - - log::info!("Expanding excerpts {excerpts:?} by {line_count} lines"); - - self.expand_excerpts( - excerpts.iter().cloned(), - line_count, - ExpandExcerptDirection::UpAndDown, - cx, - ); - continue; - } - - let excerpt_ids = self.excerpt_ids(); - if excerpt_ids.is_empty() || (rng.random() && excerpt_ids.len() < max_excerpts) { + let snapshot = self.snapshot(cx); + let buffer_ids = snapshot.all_buffer_ids().collect::>(); + if buffer_ids.is_empty() || (rng.random() && buffer_ids.len() < max_buffers) { let buffer_handle = if rng.random() || self.buffers.is_empty() { let text = RandomCharIter::new(&mut *rng).take(10).collect::(); buffers.push(cx.new(|cx| Buffer::local(text, cx))); @@ -3944,12 +3247,21 @@ impl MultiBuffer { let buffer = buffer_handle.read(cx); 
let buffer_text = buffer.text(); + let buffer_snapshot = buffer.snapshot(); + let mut next_min_start_ix = 0; let ranges = (0..rng.random_range(0..5)) - .map(|_| { - let end_ix = - buffer.clip_offset(rng.random_range(0..=buffer.len()), Bias::Right); - let start_ix = buffer.clip_offset(rng.random_range(0..=end_ix), Bias::Left); - ExcerptRange::new(start_ix..end_ix) + .filter_map(|_| { + if next_min_start_ix >= buffer.len() { + return None; + } + let end_ix = buffer.clip_offset( + rng.random_range(next_min_start_ix..=buffer.len()), + Bias::Right, + ); + let start_ix = buffer + .clip_offset(rng.random_range(next_min_start_ix..=end_ix), Bias::Left); + next_min_start_ix = buffer.text().ceil_char_boundary(end_ix + 1); + Some(ExcerptRange::new(start_ix..end_ix)) }) .collect::>(); log::info!( @@ -3962,20 +3274,26 @@ impl MultiBuffer { .collect::>() ); - let excerpt_id = - self.insert_excerpts_after(ExcerptId::max(), buffer_handle, ranges, cx); - log::info!("Inserted with ids: {:?}", excerpt_id); + let path_key = PathKey::for_buffer(&buffer_handle, cx); + self.set_merged_excerpt_ranges_for_path( + path_key.clone(), + buffer_handle, + &buffer_snapshot, + ranges, + cx, + ); + log::info!("Inserted with path_key: {:?}", path_key); } else { - let remove_count = rng.random_range(1..=excerpt_ids.len()); - let mut excerpts_to_remove = excerpt_ids - .choose_multiple(rng, remove_count) - .cloned() - .collect::>(); - let snapshot = self.snapshot.borrow(); - excerpts_to_remove.sort_unstable_by(|a, b| a.cmp(b, &snapshot)); - drop(snapshot); - log::info!("Removing excerpts {:?}", excerpts_to_remove); - self.remove_excerpts(excerpts_to_remove, cx); + let path_key = self + .snapshot + .borrow() + .buffers + .get(&buffer_ids.choose(rng).unwrap()) + .unwrap() + .path_key + .clone(); + log::info!("Removing excerpts {:?}", path_key); + self.remove_excerpts(path_key, cx); } } } @@ -4083,7 +3401,7 @@ impl MultiBufferSnapshot { } pub fn diff_hunks(&self) -> impl Iterator + '_ { - 
self.diff_hunks_in_range(Anchor::min()..Anchor::max()) + self.diff_hunks_in_range(Anchor::Min..Anchor::Max) } pub fn diff_hunks_in_range( @@ -4121,6 +3439,7 @@ impl MultiBufferSnapshot { })) }) .filter_map(move |(range, (hunk, is_inverted), excerpt)| { + let buffer_snapshot = excerpt.buffer_snapshot(self); if range.start != range.end && range.end == query_range.start && !hunk.range.is_empty() { return None; @@ -4139,12 +3458,12 @@ impl MultiBufferSnapshot { if self.show_deleted_hunks || is_inverted { let hunk_start_offset = if is_inverted { Anchor::in_buffer( - excerpt.id, - excerpt.buffer.anchor_after(hunk.diff_base_byte_range.start), + excerpt.path_key_index, + buffer_snapshot.anchor_after(hunk.diff_base_byte_range.start), ) .to_offset(self) } else { - Anchor::in_buffer(excerpt.id, hunk.buffer_range.start) + Anchor::in_buffer(excerpt.path_key_index, hunk.buffer_range.start) .to_offset(self) }; @@ -4155,7 +3474,8 @@ impl MultiBufferSnapshot { if !is_inverted { word_diffs.extend(hunk.buffer_word_diffs.into_iter().map(|diff| { - Anchor::range_in_buffer(excerpt.id, diff).to_offset(self) + Anchor::range_in_buffer(excerpt.path_key_index, diff) + .to_offset(self) })); } word_diffs @@ -4163,8 +3483,8 @@ impl MultiBufferSnapshot { .unwrap_or_default(); let buffer_range = if is_inverted { - excerpt.buffer.anchor_after(hunk.diff_base_byte_range.start) - ..excerpt.buffer.anchor_before(hunk.diff_base_byte_range.end) + buffer_snapshot.anchor_after(hunk.diff_base_byte_range.start) + ..buffer_snapshot.anchor_before(hunk.diff_base_byte_range.end) } else { hunk.buffer_range.clone() }; @@ -4175,10 +3495,11 @@ impl MultiBufferSnapshot { } else { DiffHunkStatusKind::Modified }; + let multi_buffer_range = + Anchor::range_in_buffer(excerpt.path_key_index, buffer_range.clone()); Some(MultiBufferDiffHunk { row_range: MultiBufferRow(range.start.row)..MultiBufferRow(end_row), - buffer_id: excerpt.buffer_id, - excerpt_id: excerpt.id, + buffer_id: buffer_snapshot.remote_id(), buffer_range, 
word_diffs, diff_base_byte_range: BufferOffset(hunk.diff_base_byte_range.start) @@ -4187,6 +3508,8 @@ impl MultiBufferSnapshot { kind: status_kind, secondary: hunk.secondary_status, }, + excerpt_range: excerpt.range.clone(), + multi_buffer_range, }) }) } @@ -4206,16 +3529,9 @@ impl MultiBufferSnapshot { return None; } let excerpt = region.excerpt; - cursor.next_excerpt_forwards(); - Some(excerpt) - }) - } - - pub fn excerpt_ids_for_range( - &self, - range: Range, - ) -> impl Iterator + '_ { - self.excerpts_for_range(range).map(|excerpt| excerpt.id) + cursor.next_excerpt_forwards(); + Some(excerpt) + }) } pub fn buffer_ids_for_range( @@ -4223,7 +3539,7 @@ impl MultiBufferSnapshot { range: Range, ) -> impl Iterator + '_ { self.excerpts_for_range(range) - .map(|excerpt| excerpt.buffer_id) + .map(|excerpt| excerpt.buffer_snapshot(self).remote_id()) } /// Resolves the given [`text::Anchor`]s to [`crate::Anchor`]s if the anchor is within a visible excerpt. @@ -4236,69 +3552,66 @@ impl MultiBufferSnapshot { let anchors = anchors.into_iter(); let mut result = Vec::with_capacity(anchors.size_hint().0); let mut anchors = anchors.peekable(); - let mut cursor = self.excerpts.cursor::>(()); + let mut cursor = self.excerpts.cursor::(()); 'anchors: while let Some(anchor) = anchors.peek() { - let Some(buffer_id) = anchor.buffer_id else { - anchors.next(); - result.push(None); - continue 'anchors; - }; - let mut same_buffer_anchors = - anchors.peeking_take_while(|a| a.buffer_id.is_some_and(|b| buffer_id == b)); + let buffer_id = anchor.buffer_id; + let mut same_buffer_anchors = anchors.peeking_take_while(|a| a.buffer_id == buffer_id); - if let Some(locators) = self.buffer_locators.get(&buffer_id) { + if let Some(buffer) = self.buffers.get(&buffer_id) { + let path = &buffer.path_key; let Some(mut next) = same_buffer_anchors.next() else { continue 'anchors; }; - 'excerpts: for locator in locators.iter() { - if cursor.seek_forward(&Some(locator), Bias::Left) - && let Some(excerpt) = 
cursor.item() - { - loop { - // anchor is before the first excerpt - if excerpt - .range - .context - .start - .cmp(&next, &excerpt.buffer) - .is_gt() - { - // so we skip it and try the next anchor - result.push(None); - match same_buffer_anchors.next() { - Some(anchor) => next = anchor, - None => continue 'anchors, - } - // anchor is within the excerpt - } else if excerpt - .range - .context - .end - .cmp(&next, &excerpt.buffer) - .is_ge() - { - // record it and all following anchors that are within - result.push(Some(Anchor::in_buffer(excerpt.id, next))); - result.extend( - same_buffer_anchors - .peeking_take_while(|a| { - excerpt - .range - .context - .end - .cmp(a, &excerpt.buffer) - .is_ge() - }) - .map(|a| Some(Anchor::in_buffer(excerpt.id, a))), - ); - match same_buffer_anchors.next() { - Some(anchor) => next = anchor, - None => continue 'anchors, - } - // anchor is after the excerpt, try the next one - } else { - continue 'excerpts; + cursor.seek_forward(path, Bias::Left); + 'excerpts: loop { + let Some(excerpt) = cursor.item() else { + break; + }; + if &excerpt.path_key != path { + break; + } + let buffer_snapshot = excerpt.buffer_snapshot(self); + + loop { + // anchor is before the first excerpt + if excerpt + .range + .context + .start + .cmp(&next, &buffer_snapshot) + .is_gt() + { + // so we skip it and try the next anchor + result.push(None); + match same_buffer_anchors.next() { + Some(anchor) => next = anchor, + None => continue 'anchors, } + // anchor is within the excerpt + } else if excerpt + .range + .context + .end + .cmp(&next, &buffer_snapshot) + .is_ge() + { + // record it and all following anchors that are within + result.push(Some(Anchor::in_buffer(excerpt.path_key_index, next))); + result.extend( + same_buffer_anchors + .peeking_take_while(|a| { + excerpt.range.context.end.cmp(a, &buffer_snapshot).is_ge() + }) + .map(|a| Some(Anchor::in_buffer(excerpt.path_key_index, a))), + ); + match same_buffer_anchors.next() { + Some(anchor) => next = 
anchor, + None => continue 'anchors, + } + // anchor is after the excerpt, try the next one + } else { + cursor.next(); + continue 'excerpts; } } } @@ -4311,79 +3624,31 @@ impl MultiBufferSnapshot { result } - pub fn ranges_to_buffer_ranges( + pub fn range_to_buffer_ranges( &self, - ranges: impl Iterator>, - ) -> impl Iterator, ExcerptId)> { - ranges.flat_map(|range| { - self.range_to_buffer_ranges((Bound::Included(range.start), Bound::Included(range.end))) - .into_iter() - }) - } - - pub fn range_to_buffer_ranges( - &self, - range: R, - ) -> Vec<(&BufferSnapshot, Range, ExcerptId)> - where - R: RangeBounds, - T: ToOffset, - { - self.range_to_buffer_ranges_with_context(range) - .into_iter() - .map(|(buffer, range, id, _context)| (buffer, range, id)) - .collect() - } - - pub fn range_to_buffer_ranges_with_context( - &self, - range: R, + range: Range, ) -> Vec<( - &BufferSnapshot, + BufferSnapshot, Range, - ExcerptId, - Range, - )> - where - R: RangeBounds, - T: ToOffset, - { - let start = match range.start_bound() { - Bound::Included(start) => start.to_offset(self), - Bound::Excluded(_) => panic!("excluded start bound not supported"), - Bound::Unbounded => MultiBufferOffset::ZERO, - }; - let end_bound = match range.end_bound() { - Bound::Included(end) => Bound::Included(end.to_offset(self)), - Bound::Excluded(end) => Bound::Excluded(end.to_offset(self)), - Bound::Unbounded => Bound::Unbounded, - }; - let bounds = (Bound::Included(start), end_bound); - + ExcerptRange, + )> { let mut cursor = self.cursor::(); + let start = range.start.to_offset(self); + let end = range.end.to_offset(self); cursor.seek(&start); let mut result: Vec<( - &BufferSnapshot, + BufferSnapshot, Range, - ExcerptId, - Range, + ExcerptRange, )> = Vec::new(); while let Some(region) = cursor.region() { - let dominated_by_end_bound = match end_bound { - Bound::Included(end) => region.range.start > end, - Bound::Excluded(end) => region.range.start >= end, - Bound::Unbounded => false, - }; - if 
dominated_by_end_bound { + if region.range.start >= end { break; } if region.is_main_buffer { let start_overshoot = start.saturating_sub(region.range.start); - let end_offset = match end_bound { - Bound::Included(end) | Bound::Excluded(end) => end, - Bound::Unbounded => region.range.end, - }; + let end_offset = end; let end_overshoot = end_offset.saturating_sub(region.range.start); let start = region .buffer_range @@ -4393,34 +3658,46 @@ impl MultiBufferSnapshot { .buffer_range .end .min(region.buffer_range.start + end_overshoot); - let context = region.excerpt.range.context.clone(); - if let Some(prev) = result.last_mut().filter(|(_, prev_range, excerpt_id, _)| { - *excerpt_id == region.excerpt.id && prev_range.end == start - }) { + let excerpt_range = region.excerpt.range.clone(); + if let Some(prev) = + result + .last_mut() + .filter(|(prev_buffer, prev_range, prev_excerpt)| { + prev_buffer.remote_id() == region.buffer.remote_id() + && prev_range.end == start + && prev_excerpt.context.start == excerpt_range.context.start + }) + { prev.1.end = end; } else { - result.push((region.buffer, start..end, region.excerpt.id, context)); + result.push((region.buffer.clone(), start..end, excerpt_range)); } } cursor.next(); } - if let Some(excerpt) = cursor.excerpt() { - let dominated_by_prev_excerpt = - result.last().is_some_and(|(_, _, id, _)| *id == excerpt.id); - if !dominated_by_prev_excerpt && excerpt.text_summary.len == 0 { - let excerpt_position = self.len(); - if bounds.contains(&excerpt_position) { - let buffer_offset = - BufferOffset(excerpt.range.context.start.to_offset(&excerpt.buffer)); - let context = excerpt.range.context.clone(); - result.push(( - &excerpt.buffer, - buffer_offset..buffer_offset, - excerpt.id, - context, - )); - } + if let Some(excerpt) = cursor.excerpt() + && excerpt.text_summary.len == 0 + && end == self.len() + { + let buffer_snapshot = excerpt.buffer_snapshot(self); + + let buffer_offset = + 
BufferOffset(excerpt.range.context.start.to_offset(buffer_snapshot)); + let excerpt_range = excerpt.range.clone(); + if result + .last_mut() + .is_none_or(|(prev_buffer, prev_range, prev_excerpt)| { + prev_buffer.remote_id() != buffer_snapshot.remote_id() + || prev_range.end != buffer_offset + || prev_excerpt.context.start != excerpt_range.context.start + }) + { + result.push(( + buffer_snapshot.clone(), + buffer_offset..buffer_offset, + excerpt_range, + )); } } @@ -4430,14 +3707,7 @@ impl MultiBufferSnapshot { pub fn range_to_buffer_ranges_with_deleted_hunks( &self, range: Range, - ) -> impl Iterator< - Item = ( - &BufferSnapshot, - Range, - ExcerptId, - Option, - ), - > + '_ { + ) -> impl Iterator, Option)> + '_ { let start = range.start.to_offset(self); let end = range.end.to_offset(self); @@ -4460,18 +3730,12 @@ impl MultiBufferSnapshot { .end .min(region.buffer_range.start + end_overshoot); - let region_excerpt_id = region.excerpt.id; let deleted_hunk_anchor = if region.is_main_buffer { None } else { Some(self.anchor_before(region.range.start)) }; - let result = ( - region.buffer, - start..end, - region_excerpt_id, - deleted_hunk_anchor, - ); + let result = (region.buffer, start..end, deleted_hunk_anchor); cursor.next(); Some(result) }) @@ -4504,7 +3768,7 @@ impl MultiBufferSnapshot { + AddAssign + Ord, { - let mut current_excerpt_metadata: Option<(ExcerptId, I)> = None; + let mut current_excerpt_metadata: Option<(ExcerptRange, I)> = None; let mut cursor = self.cursor::(); // Find the excerpt and buffer offset where the given range ends. 
@@ -4519,7 +3783,7 @@ impl MultiBufferSnapshot { ::default() }; buffer_end = buffer_end + overshoot; - range_end = Some((region.excerpt.id, buffer_end)); + range_end = Some((region.excerpt.range.clone(), buffer_end)); break; } cursor.next(); @@ -4544,11 +3808,12 @@ impl MultiBufferSnapshot { iter::from_fn(move || { loop { let excerpt = cursor.excerpt()?; + let buffer_snapshot = excerpt.buffer_snapshot(self); // If we have already retrieved metadata for this excerpt, continue to use it. let metadata_iter = if let Some((_, metadata)) = current_excerpt_metadata .as_mut() - .filter(|(excerpt_id, _)| *excerpt_id == excerpt.id) + .filter(|(excerpt_info, _)| excerpt_info == &excerpt.range) { Some(metadata) } @@ -4571,16 +3836,20 @@ impl MultiBufferSnapshot { .range .context .end - .summary::(&excerpt.buffer); - if let Some((end_excerpt_id, end_buffer_offset)) = range_end - && excerpt.id == end_excerpt_id + .summary::(&buffer_snapshot); + if let Some((end_excerpt, end_buffer_offset)) = &range_end + && &excerpt.range == end_excerpt { - buffer_end = buffer_end.min(end_buffer_offset); + buffer_end = buffer_end.min(*end_buffer_offset); } - get_buffer_metadata(&excerpt.buffer, buffer_start..buffer_end).map(|iterator| { - &mut current_excerpt_metadata.insert((excerpt.id, iterator)).1 - }) + get_buffer_metadata(&buffer_snapshot, buffer_start..buffer_end).map( + |iterator| { + &mut current_excerpt_metadata + .insert((excerpt.range.clone(), iterator)) + .1 + }, + ) }; // Visit each metadata item. @@ -4644,8 +3913,8 @@ impl MultiBufferSnapshot { // When there are no more metadata items for this excerpt, move to the next excerpt. 
else { current_excerpt_metadata.take(); - if let Some((end_excerpt_id, _)) = range_end - && excerpt.id == end_excerpt_id + if let Some((end_excerpt, _)) = &range_end + && &excerpt.range == end_excerpt { return None; } @@ -4668,12 +3937,14 @@ impl MultiBufferSnapshot { cursor.seek_to_start_of_current_excerpt(); let excerpt = cursor.excerpt()?; - let excerpt_start = excerpt.range.context.start.to_offset(&excerpt.buffer); - let excerpt_end = excerpt.range.context.end.to_offset(&excerpt.buffer); - let current_position = self - .anchor_before(offset) - .text_anchor - .to_offset(&excerpt.buffer); + let buffer = excerpt.buffer_snapshot(self); + let excerpt_start = excerpt.range.context.start.to_offset(buffer); + let excerpt_end = excerpt.range.context.end.to_offset(buffer); + let current_position = match self.anchor_before(offset) { + Anchor::Min => 0, + Anchor::Excerpt(excerpt_anchor) => excerpt_anchor.text_anchor().to_offset(buffer), + Anchor::Max => unreachable!(), + }; if let Some(diff) = self.diff_state(excerpt.buffer_id) { if let Some(main_buffer) = &diff.main_buffer { @@ -4683,24 +3954,22 @@ impl MultiBufferSnapshot { if hunk.diff_base_byte_range.end >= current_position { continue; } - let hunk_start = excerpt.buffer.anchor_after(hunk.diff_base_byte_range.start); - let start = Anchor::in_buffer(excerpt.id, hunk_start).to_point(self); + let hunk_start = buffer.anchor_after(hunk.diff_base_byte_range.start); + let start = + Anchor::in_buffer(excerpt.path_key_index, hunk_start).to_point(self); return Some(MultiBufferRow(start.row)); } } else { - let excerpt_end = excerpt - .buffer - .anchor_before(excerpt_end.min(current_position)); - for hunk in diff.hunks_intersecting_range_rev( - excerpt.range.context.start..excerpt_end, - &excerpt.buffer, - ) { - let hunk_end = hunk.buffer_range.end.to_offset(&excerpt.buffer); + let excerpt_end = buffer.anchor_before(excerpt_end.min(current_position)); + for hunk in diff + 
.hunks_intersecting_range_rev(excerpt.range.context.start..excerpt_end, buffer) + { + let hunk_end = hunk.buffer_range.end.to_offset(buffer); if hunk_end >= current_position { continue; } - let start = - Anchor::in_buffer(excerpt.id, hunk.buffer_range.start).to_point(self); + let start = Anchor::in_buffer(excerpt.path_key_index, hunk.buffer_range.start) + .to_point(self); return Some(MultiBufferRow(start.row)); } } @@ -4709,6 +3978,7 @@ impl MultiBufferSnapshot { loop { cursor.prev_excerpt(); let excerpt = cursor.excerpt()?; + let buffer = excerpt.buffer_snapshot(self); let Some(diff) = self.diff_state(excerpt.buffer_id) else { continue; @@ -4716,24 +3986,25 @@ impl MultiBufferSnapshot { if let Some(main_buffer) = &diff.main_buffer { let Some(hunk) = diff .hunks_intersecting_base_text_range_rev( - excerpt.range.context.to_offset(&excerpt.buffer), + excerpt.range.context.to_offset(buffer), main_buffer, ) .next() else { continue; }; - let hunk_start = excerpt.buffer.anchor_after(hunk.diff_base_byte_range.start); - let start = Anchor::in_buffer(excerpt.id, hunk_start).to_point(self); + let hunk_start = buffer.anchor_after(hunk.diff_base_byte_range.start); + let start = Anchor::in_buffer(excerpt.path_key_index, hunk_start).to_point(self); return Some(MultiBufferRow(start.row)); } else { let Some(hunk) = diff - .hunks_intersecting_range_rev(excerpt.range.context.clone(), &excerpt.buffer) + .hunks_intersecting_range_rev(excerpt.range.context.clone(), buffer) .next() else { continue; }; - let start = Anchor::in_buffer(excerpt.id, hunk.buffer_range.start).to_point(self); + let start = Anchor::in_buffer(excerpt.path_key_index, hunk.buffer_range.start) + .to_point(self); return Some(MultiBufferRow(start.row)); } } @@ -4808,16 +4079,17 @@ impl MultiBufferSnapshot { .map(|ch| classifier.kind(ch)) } + pub fn all_buffer_ids(&self) -> impl Iterator + '_ { + self.buffers.iter().map(|(id, _)| *id) + } + pub fn is_singleton(&self) -> bool { self.singleton } - pub fn 
as_singleton(&self) -> Option<(ExcerptId, BufferId, &BufferSnapshot)> { - if self.singleton { - self.excerpts - .iter() - .next() - .map(|e| (e.id, e.buffer_id, &*e.buffer)) + pub fn as_singleton(&self) -> Option<&BufferSnapshot> { + if self.is_singleton() { + Some(self.excerpts.first()?.buffer_snapshot(&self)) } else { None } @@ -4914,11 +4186,11 @@ impl MultiBufferSnapshot { range: MultiBufferOffset::ZERO..MultiBufferOffset::ZERO, excerpts: self.excerpts.cursor(()), diff_transforms: self.diff_transforms.cursor(()), - diffs: &self.diffs, diff_base_chunks: None, excerpt_chunks: None, buffer_chunk: None, language_aware, + snapshot: self, }; let range = range.start.to_offset(self)..range.end.to_offset(self); chunks.seek(range); @@ -5065,7 +4337,7 @@ impl MultiBufferSnapshot { && !region.is_main_buffer { let main_buffer_position = cursor.main_buffer_position()?; - let buffer_snapshot = &cursor.excerpt()?.buffer; + let buffer_snapshot = cursor.excerpt()?.buffer_snapshot(self); return Some((buffer_snapshot, main_buffer_position)); } else if buffer_offset > BufferOffset(region.buffer.len()) { return None; @@ -5073,10 +4345,7 @@ impl MultiBufferSnapshot { Some((region.buffer, buffer_offset)) } - pub fn point_to_buffer_point( - &self, - point: Point, - ) -> Option<(&BufferSnapshot, Point, ExcerptId)> { + pub fn point_to_buffer_point(&self, point: Point) -> Option<(&BufferSnapshot, Point)> { let mut cursor = self.cursor::(); cursor.seek(&point); let region = cursor.region()?; @@ -5087,11 +4356,14 @@ impl MultiBufferSnapshot { && region.has_trailing_newline && !region.is_main_buffer { - return Some((&excerpt.buffer, cursor.main_buffer_position()?, excerpt.id)); + return Some(( + &excerpt.buffer_snapshot(self), + cursor.main_buffer_position()?, + )); } else if buffer_point > region.buffer.max_point() { return None; } - Some((region.buffer, buffer_point, excerpt.id)) + Some((region.buffer, buffer_point)) } pub fn suggested_indents( @@ -5408,21 +4680,20 @@ impl 
MultiBufferSnapshot { let mut cursor = self.excerpts.cursor::(()); cursor.seek(&range.start, Bias::Right); if let Some(excerpt) = cursor.item() { + let buffer_snapshot = excerpt.buffer_snapshot(self); let mut end_before_newline = cursor.end(); if excerpt.has_trailing_newline { end_before_newline -= 1; } - let excerpt_start = excerpt.range.context.start.to_offset(&excerpt.buffer); + let excerpt_start = excerpt.range.context.start.to_offset(&buffer_snapshot); let start_in_excerpt = excerpt_start + (range.start - *cursor.start()); let end_in_excerpt = excerpt_start + (cmp::min(end_before_newline, range.end) - *cursor.start()); summary.add_text_dim( - &excerpt - .buffer - .text_summary_for_range::( - start_in_excerpt..end_in_excerpt, - ), + &buffer_snapshot.text_summary_for_range::( + start_in_excerpt..end_in_excerpt, + ), ); if range.end > end_before_newline { @@ -5437,16 +4708,15 @@ impl MultiBufferSnapshot { .summary::<_, ExcerptDimension>(&range.end, Bias::Right) .0; if let Some(excerpt) = cursor.item() { + let buffer_snapshot = excerpt.buffer_snapshot(self); range.end = cmp::max(*cursor.start(), range.end); - let excerpt_start = excerpt.range.context.start.to_offset(&excerpt.buffer); + let excerpt_start = excerpt.range.context.start.to_offset(&buffer_snapshot); let end_in_excerpt = excerpt_start + (range.end - *cursor.start()); summary.add_text_dim( - &excerpt - .buffer - .text_summary_for_range::( - excerpt_start..end_in_excerpt, - ), + &buffer_snapshot.text_summary_for_range::( + excerpt_start..end_in_excerpt, + ), ); } } @@ -5464,38 +4734,42 @@ impl MultiBufferSnapshot { + Add, MBD::TextDimension: Sub + Ord, { - let excerpt_id = self.latest_excerpt_id(anchor.excerpt_id); - let locator = self.excerpt_locator_for_id(excerpt_id); - let (start, _, mut item) = self - .excerpts - .find::((), locator, Bias::Left); - let mut start = MBD::from_summary(&start.text); - if item.is_none() && excerpt_id == ExcerptId::max() { - item = self.excerpts.last(); - if let 
Some(last_summary) = self.excerpts.last_summary() { - start = start - ::from_text_summary(&last_summary.text.into()); + let target = anchor.seek_target(self); + let anchor = match anchor { + Anchor::Min => { + return MBD::default(); } - } + Anchor::Excerpt(excerpt_anchor) => excerpt_anchor, + Anchor::Max => { + return MBD::from_summary(&self.text_summary()); + } + }; + + let (start, _, item) = self + .excerpts + .find::((), &target, Bias::Left); + let start = MBD::from_summary(&start.text); let excerpt_start_position = ExcerptDimension(start); if self.diff_transforms.is_empty() { if let Some(excerpt) = item { - if excerpt.id != excerpt_id && excerpt_id != ExcerptId::max() { + if !excerpt.contains(anchor, self) { return excerpt_start_position.0; } + let buffer_snapshot = excerpt.buffer_snapshot(self); let excerpt_buffer_start = excerpt .range .context .start - .summary::(&excerpt.buffer); + .summary::(&buffer_snapshot); let excerpt_buffer_end = excerpt .range .context .end - .summary::(&excerpt.buffer); + .summary::(&buffer_snapshot); let buffer_summary = anchor - .text_anchor - .summary::(&excerpt.buffer); + .text_anchor() + .summary::(&buffer_snapshot); let summary = cmp::min(excerpt_buffer_end, buffer_summary); let mut position = excerpt_start_position; if summary > excerpt_buffer_start { @@ -5510,48 +4784,47 @@ impl MultiBufferSnapshot { let mut diff_transforms_cursor = self .diff_transforms .cursor::, OutputDimension>>(()); - diff_transforms_cursor.next(); if let Some(excerpt) = item { - if excerpt.id != excerpt_id && excerpt_id != ExcerptId::max() { - return self.resolve_summary_for_min_or_max_anchor( - &Anchor::min(), + if !excerpt.contains(anchor, self) { + diff_transforms_cursor.seek(&excerpt_start_position, Bias::Left); + return self.summary_for_excerpt_position_without_hunks( + Bias::Left, excerpt_start_position, &mut diff_transforms_cursor, ); } + let buffer_snapshot = excerpt.buffer_snapshot(self); let excerpt_buffer_start = excerpt .range .context 
.start - .summary::(&excerpt.buffer); + .summary::(&buffer_snapshot); let excerpt_buffer_end = excerpt .range .context .end - .summary::(&excerpt.buffer); + .summary::(&buffer_snapshot); let buffer_summary = anchor - .text_anchor - .summary::(&excerpt.buffer); + .text_anchor() + .summary::(&buffer_snapshot); let summary = cmp::min(excerpt_buffer_end, buffer_summary); let mut position = excerpt_start_position; if summary > excerpt_buffer_start { position += summary - excerpt_buffer_start; } - if diff_transforms_cursor.start().0 < position { - diff_transforms_cursor.seek_forward(&position, Bias::Left); - } - self.resolve_summary_for_anchor( - &anchor, + diff_transforms_cursor.seek(&position, Bias::Left); + self.summary_for_anchor_with_excerpt_position( + *anchor, position, &mut diff_transforms_cursor, - &excerpt.buffer, + &buffer_snapshot, ) } else { - diff_transforms_cursor.seek_forward(&excerpt_start_position, Bias::Left); - self.resolve_summary_for_min_or_max_anchor( - &Anchor::max(), + diff_transforms_cursor.seek(&excerpt_start_position, Bias::Left); + self.summary_for_excerpt_position_without_hunks( + Bias::Right, excerpt_start_position, &mut diff_transforms_cursor, ) @@ -5562,9 +4835,9 @@ impl MultiBufferSnapshot { /// Maps an anchor's excerpt-space position to its output-space position by /// walking the diff transforms. The cursor is shared across consecutive /// calls, so it may already be partway through the transform list. - fn resolve_summary_for_anchor( + fn summary_for_anchor_with_excerpt_position( &self, - anchor: &Anchor, + anchor: ExcerptAnchor, excerpt_position: ExcerptDimension, diff_transforms: &mut Cursor< DiffTransform, @@ -5595,7 +4868,7 @@ impl MultiBufferSnapshot { hunk_info, .. 
}) => { - if let Some(diff_base_anchor) = &anchor.diff_base_anchor + if let Some(diff_base_anchor) = anchor.diff_base_anchor && let Some(base_text) = self.diff_state(*buffer_id).map(|diff| diff.base_text()) && diff_base_anchor.is_valid(&base_text) @@ -5619,7 +4892,7 @@ impl MultiBufferSnapshot { } } else if at_transform_end && anchor - .text_anchor + .text_anchor() .cmp(&hunk_info.hunk_start_anchor, excerpt_buffer) .is_gt() { @@ -5654,9 +4927,9 @@ impl MultiBufferSnapshot { } /// Like `resolve_summary_for_anchor` but optimized for min/max anchors. - fn resolve_summary_for_min_or_max_anchor( + fn summary_for_excerpt_position_without_hunks( &self, - anchor: &Anchor, + bias: Bias, excerpt_position: ExcerptDimension, diff_transforms: &mut Cursor< DiffTransform, @@ -5673,7 +4946,7 @@ impl MultiBufferSnapshot { // A right-biased anchor at a transform boundary belongs to the // *next* transform, so advance past the current one. - if anchor.text_anchor.bias == Bias::Right && at_transform_end { + if bias == Bias::Right && at_transform_end { diff_transforms.next(); continue; } @@ -5689,27 +4962,27 @@ impl MultiBufferSnapshot { } fn excerpt_offset_for_anchor(&self, anchor: &Anchor) -> ExcerptOffset { - let mut cursor = self - .excerpts - .cursor::, ExcerptOffset>>(()); - let locator = self.excerpt_locator_for_id(anchor.excerpt_id); + let anchor = match anchor { + Anchor::Min => return ExcerptOffset::default(), + Anchor::Excerpt(excerpt_anchor) => excerpt_anchor, + Anchor::Max => return self.excerpts.summary().len(), + }; + let mut cursor = self.excerpts.cursor::(()); + let target = anchor.seek_target(self); - cursor.seek(&Some(locator), Bias::Left); - if cursor.item().is_none() && anchor.excerpt_id == ExcerptId::max() { - cursor.prev(); - } + cursor.seek(&target, Bias::Left); - let mut position = cursor.start().1; + let mut position = cursor.start().len(); if let Some(excerpt) = cursor.item() - && (excerpt.id == anchor.excerpt_id || anchor.excerpt_id == ExcerptId::max()) + && 
excerpt.contains(anchor, self) { - let excerpt_buffer_start = excerpt - .buffer - .offset_for_anchor(&excerpt.range.context.start); - let excerpt_buffer_end = excerpt.buffer.offset_for_anchor(&excerpt.range.context.end); + let buffer_snapshot = excerpt.buffer_snapshot(self); + let excerpt_buffer_start = + buffer_snapshot.offset_for_anchor(&excerpt.range.context.start); + let excerpt_buffer_end = buffer_snapshot.offset_for_anchor(&excerpt.range.context.end); let buffer_position = cmp::min( excerpt_buffer_end, - excerpt.buffer.offset_for_anchor(&anchor.text_anchor), + buffer_snapshot.offset_for_anchor(&anchor.text_anchor()), ); if buffer_position > excerpt_buffer_start { position += buffer_position - excerpt_buffer_start; @@ -5718,13 +4991,6 @@ impl MultiBufferSnapshot { position } - pub fn latest_excerpt_id(&self, mut excerpt_id: ExcerptId) -> ExcerptId { - while let Some(replacement) = self.replaced_excerpts.get(&excerpt_id) { - excerpt_id = *replacement; - } - excerpt_id - } - pub fn summaries_for_anchors<'a, MBD, I>(&'a self, anchors: I) -> Vec where MBD: MultiBufferDimension @@ -5743,43 +5009,57 @@ impl MultiBufferSnapshot { let mut summaries = Vec::new(); while let Some(anchor) = anchors.peek() { - let excerpt_id = self.latest_excerpt_id(anchor.excerpt_id); - - let excerpt_anchors = anchors.peeking_take_while(|anchor| { - self.latest_excerpt_id(anchor.excerpt_id) == excerpt_id - }); + let target = anchor.seek_target(self); + let excerpt_anchor = match anchor { + Anchor::Min => { + summaries.push(MBD::default()); + anchors.next(); + continue; + } + Anchor::Excerpt(excerpt_anchor) => excerpt_anchor, + Anchor::Max => { + summaries.push(MBD::from_summary(&self.text_summary())); + anchors.next(); + continue; + } + }; - let locator = self.excerpt_locator_for_id(excerpt_id); - cursor.seek_forward(locator, Bias::Left); - if cursor.item().is_none() && excerpt_id == ExcerptId::max() { - cursor.prev(); - } + cursor.seek_forward(&target, Bias::Left); let 
excerpt_start_position = ExcerptDimension(MBD::from_summary(&cursor.start().text)); if let Some(excerpt) = cursor.item() { - if excerpt.id != excerpt_id && excerpt_id != ExcerptId::max() { - let position = self.resolve_summary_for_min_or_max_anchor( - &Anchor::min(), + let buffer_snapshot = excerpt.buffer_snapshot(self); + if !excerpt.contains(&excerpt_anchor, self) { + diff_transforms_cursor.seek_forward(&excerpt_start_position, Bias::Left); + let position = self.summary_for_excerpt_position_without_hunks( + Bias::Left, excerpt_start_position, &mut diff_transforms_cursor, ); - summaries.extend(excerpt_anchors.map(|_| position)); + summaries.push(position); + anchors.next(); continue; } let excerpt_buffer_start = excerpt .range .context .start - .summary::(&excerpt.buffer); + .summary::(buffer_snapshot); let excerpt_buffer_end = excerpt .range .context .end - .summary::(&excerpt.buffer); - for (buffer_summary, anchor) in excerpt - .buffer + .summary::(buffer_snapshot); + for (buffer_summary, excerpt_anchor) in buffer_snapshot .summaries_for_anchors_with_payload::( - excerpt_anchors.map(|a| (&a.text_anchor, a)), + std::iter::from_fn(|| { + let excerpt_anchor = anchors.peek()?.excerpt_anchor()?; + if !excerpt.contains(&excerpt_anchor, self) { + return None; + } + anchors.next(); + Some((excerpt_anchor.text_anchor(), excerpt_anchor)) + }), ) { let summary = cmp::min(excerpt_buffer_end, buffer_summary); @@ -5792,21 +5072,22 @@ impl MultiBufferSnapshot { diff_transforms_cursor.seek_forward(&position, Bias::Left); } - summaries.push(self.resolve_summary_for_anchor( - anchor, + summaries.push(self.summary_for_anchor_with_excerpt_position( + excerpt_anchor, position, &mut diff_transforms_cursor, - &excerpt.buffer, + &buffer_snapshot, )); } } else { diff_transforms_cursor.seek_forward(&excerpt_start_position, Bias::Left); - let position = self.resolve_summary_for_min_or_max_anchor( - &Anchor::max(), + let position = self.summary_for_excerpt_position_without_hunks( + 
Bias::Right, excerpt_start_position, &mut diff_transforms_cursor, ); - summaries.extend(excerpt_anchors.map(|_| position)); + summaries.push(position); + anchors.next(); } } @@ -5853,92 +5134,27 @@ impl MultiBufferSnapshot { }) } - pub fn refresh_anchors<'a, I>(&'a self, anchors: I) -> Vec<(usize, Anchor, bool)> - where - I: 'a + IntoIterator, - { - let mut anchors = anchors.into_iter().enumerate().peekable(); - let mut cursor = self.excerpts.cursor::>(()); - cursor.next(); - - let mut result = Vec::new(); - - while let Some((_, anchor)) = anchors.peek() { - let old_excerpt_id = anchor.excerpt_id; - - // Find the location where this anchor's excerpt should be. - let old_locator = self.excerpt_locator_for_id(old_excerpt_id); - cursor.seek_forward(&Some(old_locator), Bias::Left); - - let next_excerpt = cursor.item(); - let prev_excerpt = cursor.prev_item(); - - // Process all of the anchors for this excerpt. - while let Some((anchor_ix, &anchor)) = - anchors.next_if(|(_, anchor)| anchor.excerpt_id == old_excerpt_id) - { - let mut anchor = anchor; - - // Leave min and max anchors unchanged if invalid or - // if the old excerpt still exists at this location - let mut kept_position = next_excerpt - .is_some_and(|e| e.id == old_excerpt_id && e.contains(&anchor)) - || old_excerpt_id == ExcerptId::max() - || old_excerpt_id == ExcerptId::min(); - - // If the old excerpt no longer exists at this location, then attempt to - // find an equivalent position for this anchor in an adjacent excerpt. - if !kept_position { - for excerpt in [next_excerpt, prev_excerpt].iter().filter_map(|e| *e) { - if excerpt.contains(&anchor) { - anchor.excerpt_id = excerpt.id; - kept_position = true; - break; - } - } - } - - // If there's no adjacent excerpt that contains the anchor's position, - // then report that the anchor has lost its position. 
- if !kept_position { - anchor = if let Some(excerpt) = next_excerpt { - let mut text_anchor = excerpt - .range - .context - .start - .bias(anchor.text_anchor.bias, &excerpt.buffer); - if text_anchor - .cmp(&excerpt.range.context.end, &excerpt.buffer) - .is_gt() - { - text_anchor = excerpt.range.context.end; - } - Anchor::in_buffer(excerpt.id, text_anchor) - } else if let Some(excerpt) = prev_excerpt { - let mut text_anchor = excerpt - .range - .context - .end - .bias(anchor.text_anchor.bias, &excerpt.buffer); - if text_anchor - .cmp(&excerpt.range.context.start, &excerpt.buffer) - .is_lt() - { - text_anchor = excerpt.range.context.start; - } - Anchor::in_buffer(excerpt.id, text_anchor) - } else if anchor.text_anchor.bias == Bias::Left { - Anchor::min() - } else { - Anchor::max() - }; + pub fn excerpts_for_buffer( + &self, + buffer_id: BufferId, + ) -> impl Iterator> { + if let Some(buffer_state) = self.buffers.get(&buffer_id) { + let path_key = buffer_state.path_key.clone(); + let mut cursor = self.excerpts.cursor::(()); + cursor.seek_forward(&path_key, Bias::Left); + Some(iter::from_fn(move || { + let excerpt = cursor.item()?; + if excerpt.path_key != path_key { + return None; } - - result.push((anchor_ix, anchor, kept_position)); - } + cursor.next(); + Some(excerpt.range.clone()) + })) + } else { + None } - result.sort_unstable_by(|a, b| a.1.cmp(&b.1, self)); - result + .into_iter() + .flatten() } pub fn anchor_before(&self, position: T) -> Anchor { @@ -5993,132 +5209,158 @@ impl MultiBufferSnapshot { let mut excerpts = self .excerpts - .cursor::>>(()); + .cursor::>(()); excerpts.seek(&excerpt_offset, Bias::Right); if excerpts.item().is_none() && excerpt_offset == excerpts.start().0 && bias == Bias::Left { excerpts.prev(); } if let Some(excerpt) = excerpts.item() { + let buffer_snapshot = excerpt.buffer_snapshot(self); let mut overshoot = excerpt_offset.saturating_sub(excerpts.start().0); if excerpt.has_trailing_newline && excerpt_offset == excerpts.end().0 { 
overshoot -= 1; bias = Bias::Right; } - let buffer_start = excerpt.range.context.start.to_offset(&excerpt.buffer); - let text_anchor = - excerpt.clip_anchor(excerpt.buffer.anchor_at(buffer_start + overshoot, bias)); - let anchor = Anchor::in_buffer(excerpt.id, text_anchor); - match diff_base_anchor { + let buffer_start = excerpt.range.context.start.to_offset(&buffer_snapshot); + let text_anchor = excerpt.clip_anchor( + buffer_snapshot.anchor_at(buffer_start + overshoot, bias), + self, + ); + let anchor = ExcerptAnchor::in_buffer(excerpt.path_key_index, text_anchor); + let anchor = match diff_base_anchor { Some(diff_base_anchor) => anchor.with_diff_base_anchor(diff_base_anchor), None => anchor, - } + }; + anchor.into() } else if excerpt_offset == ExcerptDimension(MultiBufferOffset::ZERO) && bias == Bias::Left { - Anchor::min() + Anchor::Min } else { - Anchor::max() + Anchor::Max } } - /// Wraps the [`text::Anchor`] in a [`crate::Anchor`] if this multi-buffer is a singleton. - pub fn as_singleton_anchor(&self, text_anchor: text::Anchor) -> Option { - let (excerpt, buffer, _) = self.as_singleton()?; - if text_anchor.buffer_id.is_none_or(|id| id == buffer) { - Some(Anchor::in_buffer(excerpt, text_anchor)) - } else { - None - } + /// Lifts a buffer anchor to a multibuffer anchor without checking against excerpt boundaries. Returns `None` if there are no excerpts for the buffer + pub fn anchor_in_buffer(&self, anchor: text::Anchor) -> Option { + let path_key_index = self.path_key_index_for_buffer(anchor.buffer_id)?; + Some(Anchor::in_buffer(path_key_index, anchor)) } - /// Returns an anchor for the given excerpt and text anchor, - /// Returns [`None`] if the excerpt_id is no longer valid or the text anchor range is out of excerpt's bounds. 
- pub fn anchor_range_in_excerpt( - &self, - excerpt_id: ExcerptId, - text_anchor: Range, - ) -> Option> { - let excerpt = self.excerpt(self.latest_excerpt_id(excerpt_id))?; - - Some( - Self::anchor_in_excerpt_(excerpt, text_anchor.start)? - ..Self::anchor_in_excerpt_(excerpt, text_anchor.end)?, - ) - } + /// Creates a multibuffer anchor for the given buffer anchor, if it is contained in any excerpt. + pub fn anchor_in_excerpt(&self, text_anchor: text::Anchor) -> Option { + for excerpt in { + let this = &self; + let buffer_id = text_anchor.buffer_id; + if let Some(buffer_state) = this.buffers.get(&buffer_id) { + let path_key = buffer_state.path_key.clone(); + let mut cursor = this.excerpts.cursor::(()); + cursor.seek_forward(&path_key, Bias::Left); + Some(iter::from_fn(move || { + let excerpt = cursor.item()?; + if excerpt.path_key != path_key { + return None; + } + cursor.next(); + Some(excerpt) + })) + } else { + None + } + .into_iter() + .flatten() + } { + let buffer_snapshot = excerpt.buffer_snapshot(self); + if excerpt.range.contains(&text_anchor, &buffer_snapshot) { + return Some(Anchor::in_buffer(excerpt.path_key_index, text_anchor)); + } + } - /// Returns an anchor for the given excerpt and text anchor, - /// Returns [`None`] if the excerpt_id is no longer valid or the text anchor range is out of excerpt's bounds. - pub fn anchor_in_excerpt( - &self, - excerpt_id: ExcerptId, - text_anchor: text::Anchor, - ) -> Option { - let excerpt = self.excerpt(self.latest_excerpt_id(excerpt_id))?; - Self::anchor_in_excerpt_(excerpt, text_anchor) + None } - /// Same as [`MultiBuffer::anchor_in_excerpt`], but more efficient than calling it multiple times. - pub fn anchors_in_excerpt( + /// Creates a multibuffer anchor for the given buffer anchor, if it is contained in any excerpt. 
+ pub fn buffer_anchor_range_to_anchor_range( &self, - excerpt_id: ExcerptId, - text_anchors: impl IntoIterator, - ) -> Option>> { - let excerpt = self.excerpt(self.latest_excerpt_id(excerpt_id))?; - Some( - text_anchors - .into_iter() - .map(|text_anchor| Self::anchor_in_excerpt_(excerpt, text_anchor)), - ) - } - - fn anchor_in_excerpt_(excerpt: &Excerpt, text_anchor: text::Anchor) -> Option { - match text_anchor.buffer_id { - Some(buffer_id) if buffer_id == excerpt.buffer_id => (), - Some(_) => return None, - None if text_anchor.is_max() || text_anchor.is_min() => { - return Some(Anchor::in_buffer(excerpt.id, text_anchor)); + text_anchor: Range, + ) -> Option> { + for excerpt in { + let this = &self; + let buffer_id = text_anchor.start.buffer_id; + if let Some(buffer_state) = this.buffers.get(&buffer_id) { + let path_key = buffer_state.path_key.clone(); + let mut cursor = this.excerpts.cursor::(()); + cursor.seek_forward(&path_key, Bias::Left); + Some(iter::from_fn(move || { + let excerpt = cursor.item()?; + if excerpt.path_key != path_key { + return None; + } + cursor.next(); + Some(excerpt) + })) + } else { + None + } + .into_iter() + .flatten() + } { + let buffer_snapshot = excerpt.buffer_snapshot(self); + if excerpt.range.contains(&text_anchor.start, &buffer_snapshot) + && excerpt.range.contains(&text_anchor.end, &buffer_snapshot) + { + return Some(Anchor::range_in_buffer(excerpt.path_key_index, text_anchor)); } - None => return None, - } - - let context = &excerpt.range.context; - if context.start.cmp(&text_anchor, &excerpt.buffer).is_gt() - || context.end.cmp(&text_anchor, &excerpt.buffer).is_lt() - { - return None; } - Some(Anchor::in_buffer(excerpt.id, text_anchor)) - } - - pub fn context_range_for_excerpt(&self, excerpt_id: ExcerptId) -> Option> { - Some(self.excerpt(excerpt_id)?.range.context.clone()) + None } - pub fn excerpt_range_for_excerpt( + /// Returns a buffer anchor and its buffer snapshot for the given anchor, if it is in the multibuffer. 
+ pub fn anchor_to_buffer_anchor( &self, - excerpt_id: ExcerptId, - ) -> Option> { - Some(self.excerpt(excerpt_id)?.range.clone()) + anchor: Anchor, + ) -> Option<(text::Anchor, &BufferSnapshot)> { + match anchor { + Anchor::Min => { + let excerpt = self.excerpts.first()?; + let buffer = excerpt.buffer_snapshot(self); + Some((excerpt.range.context.start, buffer)) + } + Anchor::Excerpt(excerpt_anchor) => { + let buffer = self.buffer_for_id(excerpt_anchor.buffer_id())?; + Some((excerpt_anchor.text_anchor, buffer)) + } + Anchor::Max => { + let excerpt = self.excerpts.last()?; + let buffer = excerpt.buffer_snapshot(self); + Some((excerpt.range.context.end, buffer)) + } + } } pub fn can_resolve(&self, anchor: &Anchor) -> bool { - if anchor.is_min() || anchor.is_max() { + match anchor { // todo(lw): should be `!self.is_empty()` - true - } else if let Some(excerpt) = self.excerpt(anchor.excerpt_id) { - excerpt.buffer.can_resolve(&anchor.text_anchor) - } else { - false + Anchor::Min | Anchor::Max => true, + Anchor::Excerpt(excerpt_anchor) => { + let Some(target) = excerpt_anchor.try_seek_target(self) else { + return false; + }; + let mut cursor = self.excerpts.cursor::(()); + cursor.seek(&target, Bias::Left); + let Some(excerpt) = cursor.item() else { + return false; + }; + excerpt + .buffer_snapshot(self) + .can_resolve(&excerpt_anchor.text_anchor()) + } } } - pub fn excerpts( - &self, - ) -> impl Iterator)> { - self.excerpts - .iter() - .map(|excerpt| (excerpt.id, &*excerpt.buffer, excerpt.range.clone())) + pub fn excerpts(&self) -> impl Iterator> { + self.excerpts.iter().map(|excerpt| excerpt.range.clone()) } fn cursor<'a, MBD, BD>(&'a self) -> MultiBufferCursor<'a, MBD, BD> @@ -6131,35 +5373,17 @@ impl MultiBufferSnapshot { MultiBufferCursor { excerpts, diff_transforms, - diffs: &self.diffs, cached_region: OnceCell::new(), + snapshot: self, } } - pub fn excerpt_before(&self, excerpt_id: ExcerptId) -> Option> { - let start_locator = 
self.excerpt_locator_for_id(excerpt_id); - let mut excerpts = self - .excerpts - .cursor::, ExcerptOffset>>(()); - excerpts.seek(&Some(start_locator), Bias::Left); + pub fn excerpt_before(&self, anchor: Anchor) -> Option> { + let target = anchor.try_seek_target(&self)?; + let mut excerpts = self.excerpts.cursor::(()); + excerpts.seek(&target, Bias::Left); excerpts.prev(); - - let mut diff_transforms = self - .diff_transforms - .cursor::>(()); - diff_transforms.seek(&excerpts.start().1, Bias::Left); - if diff_transforms.end().excerpt_dimension < excerpts.start().1 { - diff_transforms.next(); - } - - let excerpt = excerpts.item()?; - Some(MultiBufferExcerpt { - excerpt, - offset: diff_transforms.start().output_dimension.0, - buffer_offset: BufferOffset(excerpt.range.context.start.to_offset(&excerpt.buffer)), - excerpt_offset: excerpts.start().1, - diff_transforms, - }) + Some(excerpts.item()?.range.clone()) } pub fn excerpt_boundaries_in_range( @@ -6204,7 +5428,7 @@ impl MultiBufferSnapshot { } else { cursor.seek_to_start_of_current_excerpt(); } - let mut prev_region = cursor + let mut prev_excerpt = cursor .fetch_excerpt_with_range() .map(|(excerpt, _)| excerpt); @@ -6219,7 +5443,7 @@ impl MultiBufferSnapshot { let (next_excerpt, next_range) = cursor.fetch_excerpt_with_range()?; cursor.next_excerpt_forwards(); if !bounds.contains(&next_range.start.key) { - prev_region = Some(next_excerpt); + prev_excerpt = Some(next_excerpt); continue; } @@ -6230,18 +5454,20 @@ impl MultiBufferSnapshot { self.max_point() }; - let prev = prev_region.as_ref().map(|region| ExcerptInfo { - id: region.id, - buffer: region.buffer.clone(), - buffer_id: region.buffer_id, - range: region.range.clone(), + let prev = prev_excerpt.as_ref().map(|excerpt| ExcerptBoundaryInfo { + start_anchor: Anchor::in_buffer( + excerpt.path_key_index, + excerpt.range.context.start, + ), + range: excerpt.range.clone(), end_row: MultiBufferRow(next_region_start.row), }); - let next = ExcerptInfo { - id: 
next_excerpt.id, - buffer: next_excerpt.buffer.clone(), - buffer_id: next_excerpt.buffer_id, + let next = ExcerptBoundaryInfo { + start_anchor: Anchor::in_buffer( + next_excerpt.path_key_index, + next_excerpt.range.context.start, + ), range: next_excerpt.range.clone(), end_row: if next_excerpt.has_trailing_newline { MultiBufferRow(next_region_end.row - 1) @@ -6252,7 +5478,7 @@ impl MultiBufferSnapshot { let row = MultiBufferRow(next_region_start.row); - prev_region = Some(next_excerpt); + prev_excerpt = Some(next_excerpt); return Some(ExcerptBoundary { row, prev, next }); } @@ -6267,6 +5493,91 @@ impl MultiBufferSnapshot { self.non_text_state_update_count } + /// Allows converting several ranges within the same excerpt between buffer offsets and multibuffer offsets. + /// + /// If the input range is contained in a single excerpt, invokes the callback with the full range of that excerpt + /// and the input range both converted to buffer coordinates. The buffer ranges returned by the callback are lifted back + /// to multibuffer offsets and returned. + /// + /// Returns `None` if the input range spans multiple excerpts. + pub fn map_excerpt_ranges<'a, T>( + &'a self, + position: Range, + f: impl FnOnce( + &'a BufferSnapshot, + ExcerptRange, + Range, + ) -> Vec<(Range, T)>, + ) -> Option, T)>> { + let mut cursor = self.cursor::(); + cursor.seek(&position.start); + + let region = cursor.region()?; + if !region.is_main_buffer { + return None; + } + let excerpt = cursor.excerpt()?; + let excerpt_start = *cursor.excerpts.start(); + let input_buffer_start = cursor.buffer_position_at(&position.start)?; + + cursor.seek_forward(&position.end); + if cursor.excerpt()? 
!= excerpt { + return None; + } + let region = cursor.region()?; + if !region.is_main_buffer { + return None; + } + let input_buffer_end = cursor.buffer_position_at(&position.end)?; + let input_buffer_range = input_buffer_start..input_buffer_end; + let buffer = excerpt.buffer_snapshot(self); + let excerpt_context_range = excerpt.range.context.to_offset(buffer); + let excerpt_context_range = + BufferOffset(excerpt_context_range.start)..BufferOffset(excerpt_context_range.end); + let excerpt_primary_range = excerpt.range.primary.to_offset(buffer); + let excerpt_primary_range = + BufferOffset(excerpt_primary_range.start)..BufferOffset(excerpt_primary_range.end); + let results = f( + buffer, + ExcerptRange { + context: excerpt_context_range.clone(), + primary: excerpt_primary_range, + }, + input_buffer_range, + ); + let mut diff_transforms = cursor.diff_transforms; + Some( + results + .into_iter() + .map(|(buffer_range, metadata)| { + let clamped_start = buffer_range + .start + .max(excerpt_context_range.start) + .min(excerpt_context_range.end); + let clamped_end = buffer_range + .end + .max(clamped_start) + .min(excerpt_context_range.end); + let excerpt_offset_start = + excerpt_start + (clamped_start.0 - excerpt_context_range.start.0); + let excerpt_offset_end = + excerpt_start + (clamped_end.0 - excerpt_context_range.start.0); + + diff_transforms.seek(&excerpt_offset_start, Bias::Right); + let mut output_start = diff_transforms.start().output_dimension; + output_start += + excerpt_offset_start - diff_transforms.start().excerpt_dimension; + + diff_transforms.seek_forward(&excerpt_offset_end, Bias::Right); + let mut output_end = diff_transforms.start().output_dimension; + output_end += excerpt_offset_end - diff_transforms.start().excerpt_dimension; + + (output_start.0..output_end.0, metadata) + }) + .collect(), + ) + } + /// Returns the smallest enclosing bracket ranges containing the given range or /// None if no brackets contain range or the range is not contained in 
a single /// excerpt @@ -6281,32 +5592,31 @@ impl MultiBufferSnapshot { >, ) -> Option<(Range, Range)> { let range = range.start.to_offset(self)..range.end.to_offset(self); - let mut excerpt = self.excerpt_containing(range.clone())?; - let buffer = excerpt.buffer(); - let excerpt_buffer_range = excerpt.buffer_range(); - - // Filter to ranges contained in the excerpt - let range_filter = |open: Range, close: Range| -> bool { - excerpt_buffer_range.contains(&BufferOffset(open.start)) - && excerpt_buffer_range.contains(&BufferOffset(close.end)) - && range_filter.is_none_or(|filter| { - filter( - buffer, - BufferOffset(open.start)..BufferOffset(close.end), - BufferOffset(close.start)..BufferOffset(close.end), - ) - }) - }; - - let (open, close) = excerpt.buffer().innermost_enclosing_bracket_ranges( - excerpt.map_range_to_buffer(range), - Some(&range_filter), - )?; - - Some(( - excerpt.map_range_from_buffer(BufferOffset(open.start)..BufferOffset(open.end)), - excerpt.map_range_from_buffer(BufferOffset(close.start)..BufferOffset(close.end)), - )) + let results = + self.map_excerpt_ranges(range, |buffer, excerpt_range, input_buffer_range| { + let filter = |open: Range, close: Range| -> bool { + excerpt_range.context.start.0 <= open.start + && close.end <= excerpt_range.context.end.0 + && range_filter.is_none_or(|filter| { + filter( + buffer, + BufferOffset(open.start)..BufferOffset(close.end), + BufferOffset(close.start)..BufferOffset(close.end), + ) + }) + }; + let Some((open, close)) = + buffer.innermost_enclosing_bracket_ranges(input_buffer_range, Some(&filter)) + else { + return Vec::new(); + }; + vec![ + (BufferOffset(open.start)..BufferOffset(open.end), ()), + (BufferOffset(close.start)..BufferOffset(close.end), ()), + ] + })?; + let [(open, _), (close, _)] = results.try_into().ok()?; + Some((open, close)) } /// Returns enclosing bracket ranges containing the given range or returns None if the range is @@ -6314,30 +5624,33 @@ impl MultiBufferSnapshot { pub fn 
enclosing_bracket_ranges( &self, range: Range, - ) -> Option, Range)> + '_> - { + ) -> Option, Range)>> { let range = range.start.to_offset(self)..range.end.to_offset(self); - let mut excerpt = self.excerpt_containing(range.clone())?; - - Some( - excerpt - .buffer() - .enclosing_bracket_ranges(excerpt.map_range_to_buffer(range)) - .filter_map(move |pair| { - let open_range = - BufferOffset(pair.open_range.start)..BufferOffset(pair.open_range.end); - let close_range = - BufferOffset(pair.close_range.start)..BufferOffset(pair.close_range.end); - if excerpt.contains_buffer_range(open_range.start..close_range.end) { - Some(( - excerpt.map_range_from_buffer(open_range), - excerpt.map_range_from_buffer(close_range), - )) - } else { - None - } - }), - ) + let results = + self.map_excerpt_ranges(range, |buffer, excerpt_range, input_buffer_range| { + buffer + .enclosing_bracket_ranges(input_buffer_range) + .filter(|pair| { + excerpt_range.context.start.0 <= pair.open_range.start + && pair.close_range.end <= excerpt_range.context.end.0 + }) + .flat_map(|pair| { + [ + ( + BufferOffset(pair.open_range.start) + ..BufferOffset(pair.open_range.end), + (), + ), + ( + BufferOffset(pair.close_range.start) + ..BufferOffset(pair.close_range.end), + (), + ), + ] + }) + .collect() + })?; + Some(results.into_iter().map(|(range, _)| range).tuples()) } /// Returns enclosing bracket ranges containing the given range or returns None if the range is @@ -6348,54 +5661,55 @@ impl MultiBufferSnapshot { options: TreeSitterOptions, ) -> impl Iterator, TextObject)> + '_ { let range = range.start.to_offset(self)..range.end.to_offset(self); - self.excerpt_containing(range.clone()) - .map(|mut excerpt| { - excerpt - .buffer() - .text_object_ranges(excerpt.map_range_to_buffer(range), options) - .filter_map(move |(range, text_object)| { - let range = BufferOffset(range.start)..BufferOffset(range.end); - if excerpt.contains_buffer_range(range.clone()) { - Some((excerpt.map_range_from_buffer(range), 
text_object)) - } else { - None - } - }) - }) - .into_iter() - .flatten() + self.map_excerpt_ranges(range, |buffer, excerpt_range, input_buffer_range| { + buffer + .text_object_ranges(input_buffer_range, options) + .filter(|(range, _)| { + excerpt_range.context.start.0 <= range.start + && range.end <= excerpt_range.context.end.0 + }) + .map(|(range, text_object)| { + ( + BufferOffset(range.start)..BufferOffset(range.end), + text_object, + ) + }) + .collect() + }) + .into_iter() + .flatten() } - /// Returns bracket range pairs overlapping the given `range` or returns None if the `range` is - /// not contained in a single excerpt pub fn bracket_ranges( &self, range: Range, - ) -> Option, Range)> + '_> - { + ) -> Option, Range)>> { let range = range.start.to_offset(self)..range.end.to_offset(self); - let mut excerpt = self.excerpt_containing(range.clone())?; - Some( - excerpt - .buffer() - .bracket_ranges(excerpt.map_range_to_buffer(range)) - .filter_map(move |pair| { - let open_range = - BufferOffset(pair.open_range.start)..BufferOffset(pair.open_range.end); - let close_range = - BufferOffset(pair.close_range.start)..BufferOffset(pair.close_range.end); - excerpt - .contains_buffer_range(open_range.start..close_range.end) - .then(|| BracketMatch { - open_range: excerpt.map_range_from_buffer(open_range), - close_range: excerpt.map_range_from_buffer(close_range), - color_index: pair.color_index, - newline_only: pair.newline_only, - syntax_layer_depth: pair.syntax_layer_depth, - }) - }) - .map(BracketMatch::bracket_ranges), - ) + let results = + self.map_excerpt_ranges(range, |buffer, excerpt_range, input_buffer_range| { + buffer + .bracket_ranges(input_buffer_range) + .filter(|pair| { + excerpt_range.context.start.0 <= pair.open_range.start + && pair.close_range.end <= excerpt_range.context.end.0 + }) + .flat_map(|pair| { + [ + ( + BufferOffset(pair.open_range.start) + ..BufferOffset(pair.open_range.end), + (), + ), + ( + BufferOffset(pair.close_range.start) + 
..BufferOffset(pair.close_range.end), + (), + ), + ] + }) + .collect() + })?; + Some(results.into_iter().map(|(range, _)| range).tuples()) } pub fn redacted_ranges<'a, T: ToOffset>( @@ -6448,7 +5762,7 @@ impl MultiBufferSnapshot { cursor.seek(&Point::new(start_row.0, 0)); iter::from_fn(move || { let mut region = cursor.region()?; - while !buffer_filter(®ion.excerpt.buffer) { + while !buffer_filter(®ion.excerpt.buffer_snapshot(self)) { cursor.next(); region = cursor.region()?; } @@ -6470,11 +5784,11 @@ impl MultiBufferSnapshot { .line_indents_in_row_range(buffer_start_row..buffer_end_row); let region_buffer_row = region.buffer_range.start.row; let region_row = region.range.start.row; - let region_buffer = ®ion.excerpt.buffer; + let region_buffer = region.excerpt.buffer_snapshot(self); cursor.next(); Some(line_indents.map(move |(buffer_row, indent)| { let row = region_row + (buffer_row - region_buffer_row); - (MultiBufferRow(row), indent, region_buffer.as_ref()) + (MultiBufferRow(row), indent, region_buffer) })) }) .flatten() @@ -6490,7 +5804,7 @@ impl MultiBufferSnapshot { cursor.seek(&Point::new(end_row.0, 0)); iter::from_fn(move || { let mut region = cursor.region()?; - while !buffer_filter(®ion.excerpt.buffer) { + while !buffer_filter(®ion.excerpt.buffer_snapshot(self)) { cursor.prev(); region = cursor.region()?; } @@ -6514,11 +5828,11 @@ impl MultiBufferSnapshot { .reversed_line_indents_in_row_range(buffer_start_row..buffer_end_row); let region_buffer_row = region.buffer_range.start.row; let region_row = region.range.start.row; - let region_buffer = ®ion.excerpt.buffer; + let region_buffer = region.excerpt.buffer_snapshot(self); cursor.prev(); Some(line_indents.map(move |(buffer_row, indent)| { let row = region_row + (buffer_row - region_buffer_row); - (MultiBufferRow(row), indent, region_buffer.as_ref()) + (MultiBufferRow(row), indent, region_buffer) })) }) .flatten() @@ -6788,7 +6102,7 @@ impl MultiBufferSnapshot { fn language_settings<'a>(&'a self, cx: &'a 
App) -> Cow<'a, LanguageSettings> { self.excerpts .first() - .map(|excerpt| &excerpt.buffer) + .map(|excerpt| excerpt.buffer_snapshot(self)) .map(|buffer| LanguageSettings::for_buffer_snapshot(buffer, None, cx)) .unwrap_or_else(move || self.language_settings_at(MultiBufferOffset::ZERO, cx)) } @@ -6840,7 +6154,7 @@ impl MultiBufferSnapshot { pub fn has_diagnostics(&self) -> bool { self.excerpts .iter() - .any(|excerpt| excerpt.buffer.has_diagnostics()) + .any(|excerpt| excerpt.buffer_snapshot(self).has_diagnostics()) } pub fn diagnostic_group( @@ -6919,7 +6233,12 @@ impl MultiBufferSnapshot { .map(|entry| (entry.range, entry.diagnostic)), ) }) - .map(|(range, diagnostic, b)| (b.buffer_id, DiagnosticEntryRef { diagnostic, range })) + .map(|(range, diagnostic, excerpt)| { + ( + excerpt.buffer_snapshot(self).remote_id(), + DiagnosticEntryRef { diagnostic, range }, + ) + }) } pub fn syntax_ancestor( @@ -6927,41 +6246,52 @@ impl MultiBufferSnapshot { range: Range, ) -> Option<(tree_sitter::Node<'_>, Range)> { let range = range.start.to_offset(self)..range.end.to_offset(self); - let mut excerpt = self.excerpt_containing(range.clone())?; - let node = excerpt - .buffer() - .syntax_ancestor(excerpt.map_range_to_buffer(range))?; - let node_range = node.byte_range(); - let node_range = BufferOffset(node_range.start)..BufferOffset(node_range.end); - if !excerpt.contains_buffer_range(node_range.clone()) { - return None; - }; - Some((node, excerpt.map_range_from_buffer(node_range))) + let results = + self.map_excerpt_ranges(range, |buffer, excerpt_range, input_buffer_range| { + let Some(node) = buffer.syntax_ancestor(input_buffer_range) else { + return vec![]; + }; + let node_range = node.byte_range(); + if excerpt_range.context.start.0 <= node_range.start + && node_range.end <= excerpt_range.context.end.0 + { + vec![( + BufferOffset(node_range.start)..BufferOffset(node_range.end), + node, + )] + } else { + vec![] + } + })?; + let (output_range, node) = 
results.into_iter().next()?; + Some((node, output_range)) } pub fn outline(&self, theme: Option<&SyntaxTheme>) -> Option> { - let (excerpt_id, _, buffer) = self.as_singleton()?; - let outline = buffer.outline(theme); + let buffer_snapshot = self.as_singleton()?; + let excerpt = self.excerpts.first()?; + let path_key_index = excerpt.path_key_index; + let outline = buffer_snapshot.outline(theme); Some(Outline::new( outline .items .into_iter() - .flat_map(|item| { - Some(OutlineItem { - depth: item.depth, - range: self.anchor_range_in_excerpt(excerpt_id, item.range)?, - source_range_for_text: self - .anchor_range_in_excerpt(excerpt_id, item.source_range_for_text)?, - text: item.text, - highlight_ranges: item.highlight_ranges, - name_ranges: item.name_ranges, - body_range: item.body_range.and_then(|body_range| { - self.anchor_range_in_excerpt(excerpt_id, body_range) - }), - annotation_range: item.annotation_range.and_then(|annotation_range| { - self.anchor_range_in_excerpt(excerpt_id, annotation_range) - }), - }) + .map(|item| OutlineItem { + depth: item.depth, + range: Anchor::range_in_buffer(path_key_index, item.range), + source_range_for_text: Anchor::range_in_buffer( + path_key_index, + item.source_range_for_text, + ), + text: item.text, + highlight_ranges: item.highlight_ranges, + name_ranges: item.name_ranges, + body_range: item + .body_range + .map(|body_range| Anchor::range_in_buffer(path_key_index, body_range)), + annotation_range: item.annotation_range.map(|annotation_range| { + Anchor::range_in_buffer(path_key_index, annotation_range) + }), }) .collect(), )) @@ -6973,173 +6303,90 @@ impl MultiBufferSnapshot { theme: Option<&SyntaxTheme>, ) -> Option<(BufferId, Vec>)> { let anchor = self.anchor_before(offset); - let excerpt @ &Excerpt { - id: excerpt_id, - buffer_id, - ref buffer, - .. 
- } = self.excerpt(anchor.excerpt_id)?; - if cfg!(debug_assertions) { - match anchor.text_anchor.buffer_id { - // we clearly are hitting this according to sentry, but in what situations can this occur? - Some(anchor_buffer_id) => { - assert_eq!( - anchor_buffer_id, buffer_id, - "anchor {anchor:?} does not match with resolved excerpt {excerpt:?}" - ) - } - None => assert!(anchor.is_max()), - } - }; + let target = anchor.try_seek_target(&self)?; + let (_, _, excerpt) = self.excerpts.find((), &target, Bias::Left); + let excerpt = excerpt?; + let buffer_snapshot = excerpt.buffer_snapshot(self); Some(( - buffer_id, - buffer - .symbols_containing(anchor.text_anchor, theme) + buffer_snapshot.remote_id(), + buffer_snapshot + .symbols_containing( + anchor + .excerpt_anchor() + .map(|anchor| anchor.text_anchor()) + .unwrap_or(text::Anchor::min_for_buffer(buffer_snapshot.remote_id())), + theme, + ) .into_iter() .flat_map(|item| { Some(OutlineItem { depth: item.depth, source_range_for_text: Anchor::range_in_buffer( - excerpt_id, + excerpt.path_key_index, item.source_range_for_text, ), - range: Anchor::range_in_buffer(excerpt_id, item.range), + range: Anchor::range_in_buffer(excerpt.path_key_index, item.range), text: item.text, highlight_ranges: item.highlight_ranges, name_ranges: item.name_ranges, - body_range: item - .body_range - .map(|body_range| Anchor::range_in_buffer(excerpt_id, body_range)), - annotation_range: item - .annotation_range - .map(|body_range| Anchor::range_in_buffer(excerpt_id, body_range)), + body_range: item.body_range.map(|body_range| { + Anchor::range_in_buffer(excerpt.path_key_index, body_range) + }), + annotation_range: item.annotation_range.map(|body_range| { + Anchor::range_in_buffer(excerpt.path_key_index, body_range) + }), }) }) .collect(), )) } - fn excerpt_locator_for_id(&self, id: ExcerptId) -> &Locator { - self.try_excerpt_locator_for_id(id) - .unwrap_or_else(|| panic!("invalid excerpt id {id:?}")) + pub fn buffer_for_path(&self, path: 
&PathKey) -> Option<&BufferSnapshot> { + let (_, _, excerpt) = self + .excerpts + .find::((), path, Bias::Left); + Some(excerpt?.buffer_snapshot(self)) } - fn try_excerpt_locator_for_id(&self, id: ExcerptId) -> Option<&Locator> { - if id == ExcerptId::min() { - Some(Locator::min_ref()) - } else if id == ExcerptId::max() { - Some(Locator::max_ref()) - } else { - let (_, _, item) = self.excerpt_ids.find::((), &id, Bias::Left); - if let Some(entry) = item - && entry.id == id - { - return Some(&entry.locator); - } - None - } + pub fn path_for_buffer(&self, buffer_id: BufferId) -> Option<&PathKey> { + Some(&self.buffers.get(&buffer_id)?.path_key) } - /// Returns the locators referenced by the given excerpt IDs, sorted by locator. - fn excerpt_locators_for_ids( - &self, - ids: impl IntoIterator, - ) -> SmallVec<[Locator; 1]> { - let mut sorted_ids = ids.into_iter().collect::>(); - sorted_ids.sort_unstable(); - sorted_ids.dedup(); - let mut locators = SmallVec::new(); - - while sorted_ids.last() == Some(&ExcerptId::max()) { - sorted_ids.pop(); - locators.push(Locator::max()); - } - - let mut sorted_ids = sorted_ids.into_iter().peekable(); - locators.extend( - sorted_ids - .peeking_take_while(|excerpt| *excerpt == ExcerptId::min()) - .map(|_| Locator::min()), - ); - - let mut cursor = self.excerpt_ids.cursor::(()); - for id in sorted_ids { - if cursor.seek_forward(&id, Bias::Left) { - locators.push(cursor.item().unwrap().locator.clone()); - } else { - panic!("invalid excerpt id {:?}", id); - } - } + pub(crate) fn path_key_index_for_buffer(&self, buffer_id: BufferId) -> Option { + let snapshot = self.buffers.get(&buffer_id)?; + Some(snapshot.path_key_index) + } - locators.sort_unstable(); - locators + fn first_excerpt_for_buffer(&self, buffer_id: BufferId) -> Option<&Excerpt> { + let path_key = &self.buffers.get(&buffer_id)?.path_key; + self.first_excerpt_for_path(path_key) } - pub fn buffer_id_for_excerpt(&self, excerpt_id: ExcerptId) -> Option { - 
Some(self.excerpt(excerpt_id)?.buffer_id) + fn first_excerpt_for_path(&self, path_key: &PathKey) -> Option<&Excerpt> { + let (_, _, first_excerpt) = + self.excerpts + .find::((), path_key, Bias::Left); + first_excerpt } - pub fn buffer_for_excerpt(&self, excerpt_id: ExcerptId) -> Option<&BufferSnapshot> { - Some(&self.excerpt(excerpt_id)?.buffer) + pub fn buffer_for_id(&self, id: BufferId) -> Option<&BufferSnapshot> { + self.buffers.get(&id).map(|state| &state.buffer_snapshot) } - pub fn range_for_excerpt(&self, excerpt_id: ExcerptId) -> Option> { - let mut cursor = self - .excerpts - .cursor::, ExcerptPoint>>(()); - let locator = self.excerpt_locator_for_id(excerpt_id); - let mut sought_exact = cursor.seek(&Some(locator), Bias::Left); - if cursor.item().is_none() && excerpt_id == ExcerptId::max() { - sought_exact = true; - cursor.prev(); - } else if excerpt_id == ExcerptId::min() { - sought_exact = true; - } - if sought_exact { - let start = cursor.start().1; - let end = cursor.end().1; - let mut diff_transforms = self - .diff_transforms - .cursor::>>(()); - diff_transforms.seek(&start, Bias::Left); - let overshoot = start - diff_transforms.start().0; - let start = diff_transforms.start().1 + overshoot; - diff_transforms.seek(&end, Bias::Right); - let overshoot = end - diff_transforms.start().0; - let end = diff_transforms.start().1 + overshoot; - Some(start.0..end.0) - } else { - None - } + fn try_path_for_anchor(&self, anchor: ExcerptAnchor) -> Option { + self.path_keys_by_index.get(&anchor.path).cloned() } - /// Returns the excerpt for the given id. The returned excerpt is guaranteed - /// to have the latest excerpt id for the one passed in and will also remap - /// `ExcerptId::max()` to the corresponding excertp ID. - /// - /// Callers of this function should generally use the resulting excerpt's `id` field - /// afterwards. 
- fn excerpt(&self, excerpt_id: ExcerptId) -> Option<&Excerpt> { - let excerpt_id = self.latest_excerpt_id(excerpt_id); - let locator = self.try_excerpt_locator_for_id(excerpt_id)?; - let (_, _, item) = - self.excerpts - .find::, _>((), &Some(locator), Bias::Left); - if let Some(excerpt) = item - && excerpt.id == excerpt_id - { - return Some(excerpt); - } else if item.is_none() && excerpt_id == ExcerptId::max() { - return self.excerpts.last(); - } - None + pub fn path_for_anchor(&self, anchor: ExcerptAnchor) -> PathKey { + self.try_path_for_anchor(anchor) + .expect("invalid anchor: path was never added to multibuffer") } /// Returns the excerpt containing range and its offset start within the multibuffer or none if `range` spans multiple excerpts pub fn excerpt_containing( &self, range: Range, - ) -> Option> { + ) -> Option<(&BufferSnapshot, ExcerptRange)> { let range = range.start.to_offset(self)..range.end.to_offset(self); let mut cursor = self.cursor::(); cursor.seek(&range.start); @@ -7147,31 +6394,15 @@ impl MultiBufferSnapshot { let start_excerpt = cursor.excerpt()?; if range.end != range.start { cursor.seek_forward(&range.end); - if cursor.excerpt()?.id != start_excerpt.id { + if cursor.excerpt()? 
!= start_excerpt { return None; } } - cursor.seek_to_start_of_current_excerpt(); - let region = cursor.region()?; - let offset = region.range.start; - let buffer_offset = start_excerpt.buffer_start_offset(); - let excerpt_offset = *cursor.excerpts.start(); - Some(MultiBufferExcerpt { - diff_transforms: cursor.diff_transforms, - excerpt: start_excerpt, - offset, - buffer_offset, - excerpt_offset, - }) - } - - pub fn buffer_id_for_anchor(&self, anchor: Anchor) -> Option { - if let Some(id) = anchor.text_anchor.buffer_id { - return Some(id); - } - let excerpt = self.excerpt_containing(anchor..anchor)?; - Some(excerpt.buffer_id()) + Some(( + start_excerpt.buffer_snapshot(self), + start_excerpt.range.clone(), + )) } pub fn selections_in_range<'a>( @@ -7180,27 +6411,34 @@ impl MultiBufferSnapshot { include_local: bool, ) -> impl 'a + Iterator)> { let mut cursor = self.excerpts.cursor::(()); - let start_locator = self.excerpt_locator_for_id(range.start.excerpt_id); - let end_locator = self.excerpt_locator_for_id(range.end.excerpt_id); - cursor.seek(start_locator, Bias::Left); + cursor.seek(&range.start.seek_target(self), Bias::Left); cursor - .take_while(move |excerpt| excerpt.locator <= *end_locator) + .take_while(move |excerpt| { + let excerpt_start = + Anchor::in_buffer(excerpt.path_key_index, excerpt.range.context.start); + excerpt_start.cmp(&range.end, self).is_le() + }) .flat_map(move |excerpt| { + let buffer_snapshot = excerpt.buffer_snapshot(self); let mut query_range = excerpt.range.context.start..excerpt.range.context.end; - if excerpt.id == range.start.excerpt_id { - query_range.start = range.start.text_anchor; + if let Some(excerpt_anchor) = range.start.excerpt_anchor() + && excerpt.contains(&excerpt_anchor, self) + { + query_range.start = excerpt_anchor.text_anchor(); } - if excerpt.id == range.end.excerpt_id { - query_range.end = range.end.text_anchor; + if let Some(excerpt_anchor) = range.end.excerpt_anchor() + && excerpt.contains(&excerpt_anchor, self) + { 
+ query_range.end = excerpt_anchor.text_anchor(); } - excerpt - .buffer + buffer_snapshot .selections_in_range(query_range, include_local) .flat_map(move |(replica_id, line_mode, cursor_shape, selections)| { selections.map(move |selection| { - let mut start = Anchor::in_buffer(excerpt.id, selection.start); - let mut end = Anchor::in_buffer(excerpt.id, selection.end); + let mut start = + Anchor::in_buffer(excerpt.path_key_index, selection.start); + let mut end = Anchor::in_buffer(excerpt.path_key_index, selection.end); if range.start.cmp(&start, self).is_gt() { start = range.start; } @@ -7237,91 +6475,251 @@ impl MultiBufferSnapshot { find_diff_state(&self.diffs, buffer_id) } - pub fn total_changed_lines(&self) -> (u32, u32) { - let summary = self.diffs.summary(); - (summary.added_rows, summary.removed_rows) - } + pub fn total_changed_lines(&self) -> (u32, u32) { + let summary = self.diffs.summary(); + (summary.added_rows, summary.removed_rows) + } + + pub fn all_diff_hunks_expanded(&self) -> bool { + self.all_diff_hunks_expanded + } + + /// Visually annotates a position or range with the `Debug` representation of a value. The + /// callsite of this function is used as a key - previous annotations will be removed. + #[cfg(debug_assertions)] + #[track_caller] + pub fn debug(&self, ranges: &R, value: V) + where + R: debug::ToMultiBufferDebugRanges, + V: std::fmt::Debug, + { + self.debug_with_key(std::panic::Location::caller(), ranges, value); + } + + /// Visually annotates a position or range with the `Debug` representation of a value. Previous + /// debug annotations with the same key will be removed. The key is also used to determine the + /// annotation's color. 
+ #[cfg(debug_assertions)] + #[track_caller] + pub fn debug_with_key(&self, key: &K, ranges: &R, value: V) + where + K: std::hash::Hash + 'static, + R: debug::ToMultiBufferDebugRanges, + V: std::fmt::Debug, + { + let text_ranges = ranges + .to_multi_buffer_debug_ranges(self) + .into_iter() + .flat_map(|range| { + self.range_to_buffer_ranges(range) + .into_iter() + .map(|(buffer_snapshot, range, _)| { + buffer_snapshot.anchor_after(range.start) + ..buffer_snapshot.anchor_before(range.end) + }) + }) + .collect(); + text::debug::GlobalDebugRanges::with_locked(|debug_ranges| { + debug_ranges.insert(key, text_ranges, format!("{value:?}").into()) + }); + } + + fn excerpt_edits_for_diff_change( + &self, + path: &PathKey, + diff_change_range: Range, + ) -> Vec>> { + let mut excerpt_edits = Vec::new(); + let mut cursor = self.excerpts.cursor::(()); + cursor.seek(path, Bias::Left); + while let Some(excerpt) = cursor.item() + && &excerpt.path_key == path + { + let buffer_snapshot = excerpt.buffer_snapshot(self); + let excerpt_buffer_range = excerpt.range.context.to_offset(buffer_snapshot); + let excerpt_start = cursor.start().clone(); + let excerpt_len = excerpt.text_summary.len; + cursor.next(); + if diff_change_range.end < excerpt_buffer_range.start + || diff_change_range.start > excerpt_buffer_range.end + { + continue; + } + let diff_change_start_in_excerpt = diff_change_range + .start + .saturating_sub(excerpt_buffer_range.start); + let diff_change_end_in_excerpt = diff_change_range + .end + .saturating_sub(excerpt_buffer_range.start); + let edit_start = excerpt_start.len() + diff_change_start_in_excerpt.min(excerpt_len); + let edit_end = excerpt_start.len() + diff_change_end_in_excerpt.min(excerpt_len); + excerpt_edits.push(Edit { + old: edit_start..edit_end, + new: edit_start..edit_end, + }); + } + excerpt_edits + } + + fn excerpts_for_path<'a>( + &'a self, + path_key: &'a PathKey, + ) -> impl Iterator> + 'a { + let mut cursor = self.excerpts.cursor::(()); + 
cursor.seek(path_key, Bias::Left); + cursor + .take_while(move |item| &item.path_key == path_key) + .map(|excerpt| excerpt.range.clone()) + } + + /// If the given multibuffer range is contained in a single excerpt and contains no deleted hunks, + /// returns the corresponding buffer range. + /// + /// Otherwise, returns None. + pub fn range_to_buffer_range( + &self, + range: Range, + ) -> Option<(&BufferSnapshot, Range)> + where + MBD: MultiBufferDimension + Ord + Sub + ops::AddAssign<::Output>, + MBD::TextDimension: AddAssign<::Output>, + { + let mut cursor = self.cursor::(); + cursor.seek(&range.start); + + let start_region = cursor.region()?.clone(); + + while let Some(region) = cursor.region() + && region.range.end < range.end + { + if !region.is_main_buffer { + return None; + } + cursor.next(); + } + + let end_region = cursor.region()?; + if end_region.buffer.remote_id() != start_region.buffer.remote_id() { + return None; + } + + let mut buffer_start = start_region.buffer_range.start; + buffer_start += range.start - start_region.range.start; + let mut buffer_end = end_region.buffer_range.start; + buffer_end += range.end - end_region.range.start; + + Some((start_region.buffer, buffer_start..buffer_end)) + } + + /// If the two endpoints of the range lie in the same excerpt, return the corresponding + /// buffer range. Intervening deleted hunks are allowed. 
+ pub fn anchor_range_to_buffer_anchor_range( + &self, + range: Range, + ) -> Option<(&BufferSnapshot, Range)> { + let mut cursor = self.excerpts.cursor::(()); + cursor.seek(&range.start.seek_target(&self), Bias::Left); + + let start_excerpt = cursor.item()?; + + let snapshot = start_excerpt.buffer_snapshot(&self); + + cursor.seek(&range.end.seek_target(&self), Bias::Left); + + let end_excerpt = cursor.item()?; + + if start_excerpt != end_excerpt { + return None; + } + + if let Anchor::Excerpt(excerpt_anchor) = range.start + && (excerpt_anchor.path != start_excerpt.path_key_index + || excerpt_anchor.buffer_id() != snapshot.remote_id()) + { + return None; + } + if let Anchor::Excerpt(excerpt_anchor) = range.end + && (excerpt_anchor.path != end_excerpt.path_key_index + || excerpt_anchor.buffer_id() != snapshot.remote_id()) + { + return None; + } + + Some(( + snapshot, + range.start.text_anchor_in(snapshot)..range.end.text_anchor_in(snapshot), + )) + } + + /// Returns all nonempty intersections of the given buffer range with excerpts in the multibuffer in order. + /// + /// The multibuffer ranges are split to not intersect deleted hunks. 
+ pub fn buffer_range_to_excerpt_ranges( + &self, + range: Range, + ) -> impl Iterator> { + assert!(range.start.buffer_id == range.end.buffer_id); + + let buffer_id = range.start.buffer_id; + self.buffers + .get(&buffer_id) + .map(|buffer_state_snapshot| { + let path_key_index = buffer_state_snapshot.path_key_index; + let buffer_snapshot = &buffer_state_snapshot.buffer_snapshot; + let buffer_range = range.to_offset(buffer_snapshot); + + let start = Anchor::in_buffer(path_key_index, range.start).to_offset(self); + let mut cursor = self.cursor::(); + cursor.seek(&start); + std::iter::from_fn(move || { + while let Some(region) = cursor.region() + && !region.is_main_buffer + { + cursor.next(); + } + + let region = cursor.region()?; + if region.buffer.remote_id() != buffer_id + || region.buffer_range.start > BufferOffset(buffer_range.end) + { + return None; + } - pub fn all_diff_hunks_expanded(&self) -> bool { - self.all_diff_hunks_expanded - } + let start = region + .buffer_range + .start + .max(BufferOffset(buffer_range.start)); + let mut end = region.buffer_range.end.min(BufferOffset(buffer_range.end)); - /// Visually annotates a position or range with the `Debug` representation of a value. The - /// callsite of this function is used as a key - previous annotations will be removed. - #[cfg(debug_assertions)] - #[track_caller] - pub fn debug(&self, ranges: &R, value: V) - where - R: debug::ToMultiBufferDebugRanges, - V: std::fmt::Debug, - { - self.debug_with_key(std::panic::Location::caller(), ranges, value); - } + cursor.next(); + while let Some(region) = cursor.region() + && region.is_main_buffer + && region.buffer.remote_id() == buffer_id + && region.buffer_range.start <= end + { + end = end + .max(region.buffer_range.end) + .min(BufferOffset(buffer_range.end)); + cursor.next(); + } - /// Visually annotates a position or range with the `Debug` representation of a value. Previous - /// debug annotations with the same key will be removed. 
The key is also used to determine the - /// annotation's color. - #[cfg(debug_assertions)] - #[track_caller] - pub fn debug_with_key(&self, key: &K, ranges: &R, value: V) - where - K: std::hash::Hash + 'static, - R: debug::ToMultiBufferDebugRanges, - V: std::fmt::Debug, - { - let text_ranges = ranges - .to_multi_buffer_debug_ranges(self) - .into_iter() - .flat_map(|range| { - self.range_to_buffer_ranges(range.start..=range.end) - .into_iter() - .map(|(buffer, range, _excerpt_id)| { - buffer.anchor_after(range.start)..buffer.anchor_before(range.end) - }) + let multibuffer_range = Anchor::range_in_buffer( + path_key_index, + buffer_snapshot.anchor_range_inside(start..end), + ); + Some(multibuffer_range) + }) }) - .collect(); - text::debug::GlobalDebugRanges::with_locked(|debug_ranges| { - debug_ranges.insert(key, text_ranges, format!("{value:?}").into()) - }); + .into_iter() + .flatten() } - fn excerpt_edits_for_diff_change( - &self, - buffer_state: &BufferState, - diff_change_range: Range, - ) -> Vec>> { - let mut excerpt_edits = Vec::new(); - for locator in &buffer_state.excerpts { - let mut cursor = self - .excerpts - .cursor::, ExcerptOffset>>(()); - cursor.seek_forward(&Some(locator), Bias::Left); - if let Some(excerpt) = cursor.item() - && excerpt.locator == *locator - { - let excerpt_buffer_range = excerpt.range.context.to_offset(&excerpt.buffer); - if diff_change_range.end < excerpt_buffer_range.start - || diff_change_range.start > excerpt_buffer_range.end - { - continue; - } - let excerpt_start = cursor.start().1; - let excerpt_len = excerpt.text_summary.len; - let diff_change_start_in_excerpt = diff_change_range - .start - .saturating_sub(excerpt_buffer_range.start); - let diff_change_end_in_excerpt = diff_change_range - .end - .saturating_sub(excerpt_buffer_range.start); - let edit_start = excerpt_start + diff_change_start_in_excerpt.min(excerpt_len); - let edit_end = excerpt_start + diff_change_end_in_excerpt.min(excerpt_len); - excerpt_edits.push(Edit { 
- old: edit_start..edit_end, - new: edit_start..edit_end, - }); - } - } - excerpt_edits + pub fn buffers_with_paths<'a>( + &'a self, + ) -> impl 'a + Iterator { + self.buffers + .values() + .map(|buffer| (&buffer.buffer_snapshot, &buffer.path_key)) } /// Returns the number of graphemes in `range`. @@ -7333,6 +6731,13 @@ impl MultiBufferSnapshot { .graphemes(true) .count() } + + pub fn range_for_buffer(&self, buffer_id: BufferId) -> Option> { + let path_key = self.path_key_index_for_buffer(buffer_id)?; + let start = Anchor::in_buffer(path_key, text::Anchor::min_for_buffer(buffer_id)); + let end = Anchor::in_buffer(path_key, text::Anchor::max_for_buffer(buffer_id)); + Some((start..end).to_point(self)) + } } #[cfg(any(test, feature = "test-support"))] @@ -7350,27 +6755,74 @@ impl MultiBufferSnapshot { #[cfg(any(test, feature = "test-support"))] fn check_invariants(&self) { let excerpts = self.excerpts.items(()); - let excerpt_ids = self.excerpt_ids.items(()); + + let mut all_buffer_path_keys = HashSet::default(); + for buffer in self.buffers.values() { + let path_key = buffer.path_key.clone(); + assert!( + all_buffer_path_keys.insert(path_key), + "path key reused for multiple buffers: {:#?}", + self.buffers + ); + } + + let all_excerpt_path_keys = HashSet::from_iter(excerpts.iter().map(|e| e.path_key.clone())); for (ix, excerpt) in excerpts.iter().enumerate() { - if ix == 0 { - if excerpt.locator <= Locator::min() { - panic!("invalid first excerpt locator {:?}", excerpt.locator); + if ix > 0 { + let prev = &excerpts[ix - 1]; + + if excerpt.path_key < prev.path_key { + panic!("excerpt path_keys are out-of-order: {:#?}", excerpts); + } else if excerpt.path_key == prev.path_key { + assert_eq!( + excerpt.buffer_id, prev.buffer_id, + "excerpts with same path_key have different buffer_ids: {:#?}", + excerpts + ); + if excerpt + .start_anchor() + .cmp(&prev.end_anchor(), &self) + .is_le() + { + panic!("excerpt anchors are out-of-order: {:#?}", excerpts); + } + if excerpt + 
.start_anchor() + .cmp(&excerpt.end_anchor(), &self) + .is_ge() + { + panic!("excerpt with backward range: {:#?}", excerpts); + } } - } else if excerpt.locator <= excerpts[ix - 1].locator { - panic!("excerpts are out-of-order: {:?}", excerpts); } - } - for (ix, entry) in excerpt_ids.iter().enumerate() { - if ix == 0 { - if entry.id.cmp(&ExcerptId::min(), self).is_le() { - panic!("invalid first excerpt id {:?}", entry.id); - } - } else if entry.id <= excerpt_ids[ix - 1].id { - panic!("excerpt ids are out-of-order: {:?}", excerpt_ids); + if ix < excerpts.len() - 1 { + assert!( + excerpt.has_trailing_newline, + "non-trailing excerpt has no trailing newline: {:#?}", + excerpts + ); + } else { + assert!( + !excerpt.has_trailing_newline, + "trailing excerpt has trailing newline: {:#?}", + excerpts + ); } + assert!( + all_buffer_path_keys.contains(&excerpt.path_key), + "excerpt path key not found in active path keys: {:#?}", + excerpt.path_key + ); + assert_eq!( + self.path_keys_by_index.get(&excerpt.path_key_index), + Some(&excerpt.path_key), + "excerpt path key index does not match path key: {:#?}", + excerpt.path_key, + ); } + assert_eq!(all_buffer_path_keys, all_excerpt_path_keys); if self.diff_transforms.summary().input != self.excerpts.summary().text { panic!( @@ -7518,7 +6970,7 @@ where && self .excerpts .item() - .is_some_and(|excerpt| excerpt.id != hunk_info.excerpt_id) + .is_some_and(|excerpt| excerpt.end_anchor() != hunk_info.excerpt_end) { self.excerpts.next(); } @@ -7584,13 +7036,13 @@ where DiffTransform::DeletedHunk { hunk_info, .. 
} => self .excerpts .item() - .is_some_and(|excerpt| excerpt.id != hunk_info.excerpt_id), + .is_some_and(|excerpt| excerpt.end_anchor() != hunk_info.excerpt_end), }) } fn main_buffer_position(&self) -> Option { let excerpt = self.excerpts.item()?; - let buffer = &excerpt.buffer; + let buffer = excerpt.buffer_snapshot(self.snapshot); let buffer_context_start = excerpt.range.context.start.summary::(buffer); let mut buffer_start = buffer_context_start; let overshoot = self.diff_transforms.end().excerpt_dimension - *self.excerpts.start(); @@ -7598,6 +7050,19 @@ where Some(buffer_start) } + fn buffer_position_at(&self, output_position: &MBD) -> Option { + let excerpt = self.excerpts.item()?; + let buffer = excerpt.buffer_snapshot(self.snapshot); + let buffer_context_start = excerpt.range.context.start.summary::(buffer); + let mut excerpt_offset = self.diff_transforms.start().excerpt_dimension; + if let Some(DiffTransform::BufferContent { .. }) = self.diff_transforms.item() { + excerpt_offset += *output_position - self.diff_transforms.start().output_dimension.0; + } + let mut result = buffer_context_start; + result += excerpt_offset - *self.excerpts.start(); + Some(result) + } + fn build_region(&self) -> Option> { let excerpt = self.excerpts.item()?; match self.diff_transforms.item()? { @@ -7608,7 +7073,7 @@ where hunk_info, .. } => { - let diff = find_diff_state(self.diffs, *buffer_id)?; + let diff = find_diff_state(&self.snapshot.diffs, *buffer_id)?; let buffer = diff.base_text(); let mut rope_cursor = buffer.as_rope().cursor(0); let buffer_start = rope_cursor.summary::(base_text_byte_range.start); @@ -7632,7 +7097,7 @@ where DiffTransform::BufferContent { inserted_hunk_info, .. 
} => { - let buffer = &excerpt.buffer; + let buffer = excerpt.buffer_snapshot(self.snapshot); let buffer_context_start = excerpt.range.context.start.summary::(buffer); let mut start = self.diff_transforms.start().output_dimension.0; @@ -7726,28 +7191,47 @@ where impl Excerpt { fn new( - id: ExcerptId, - locator: Locator, - buffer_id: BufferId, - buffer: Arc, + path_key: PathKey, + path_key_index: PathKeyIndex, + buffer_snapshot: &BufferSnapshot, range: ExcerptRange, has_trailing_newline: bool, ) -> Self { Excerpt { - id, - locator, - max_buffer_row: range.context.end.to_point(&buffer).row, - text_summary: buffer - .text_summary_for_range::(range.context.to_offset(&buffer)), - buffer_id, - buffer, + path_key, + path_key_index, + buffer_id: buffer_snapshot.remote_id(), + max_buffer_row: range.context.end.to_point(&buffer_snapshot).row, + text_summary: buffer_snapshot.text_summary_for_range::( + range.context.to_offset(&buffer_snapshot), + ), range, has_trailing_newline, } } - fn chunks_in_range(&self, range: Range, language_aware: bool) -> ExcerptChunks<'_> { - let content_start = self.range.context.start.to_offset(&self.buffer); + fn buffer_snapshot<'a>(&self, snapshot: &'a MultiBufferSnapshot) -> &'a BufferSnapshot { + &snapshot + .buffers + .get(&self.buffer_id) + .expect("buffer snapshot not found for excerpt") + .buffer_snapshot + } + + fn buffer(&self, multibuffer: &MultiBuffer) -> Entity { + multibuffer + .buffer(self.buffer_id) + .expect("buffer entity not found for excerpt") + } + + fn chunks_in_range<'a>( + &'a self, + range: Range, + language_aware: bool, + snapshot: &'a MultiBufferSnapshot, + ) -> ExcerptChunks<'a> { + let buffer = self.buffer_snapshot(snapshot); + let content_start = self.range.context.start.to_offset(buffer); let chunks_start = content_start + range.start; let chunks_end = content_start + cmp::min(range.end, self.text_summary.len); @@ -7755,17 +7239,23 @@ impl Excerpt { && range.start <= self.text_summary.len && range.end > 
self.text_summary.len; - let content_chunks = self.buffer.chunks(chunks_start..chunks_end, language_aware); + let content_chunks = buffer.chunks(chunks_start..chunks_end, language_aware); ExcerptChunks { - excerpt_id: self.id, content_chunks, has_footer, + end: self.end_anchor(), } } - fn seek_chunks(&self, excerpt_chunks: &mut ExcerptChunks, range: Range) { - let content_start = self.range.context.start.to_offset(&self.buffer); + fn seek_chunks( + &self, + excerpt_chunks: &mut ExcerptChunks, + range: Range, + snapshot: &MultiBufferSnapshot, + ) { + let buffer = self.buffer_snapshot(snapshot); + let content_start = self.range.context.start.to_offset(buffer); let chunks_start = content_start + range.start; let chunks_end = content_start + cmp::min(range.end, self.text_summary.len); excerpt_chunks.content_chunks.seek(chunks_start..chunks_end); @@ -7774,218 +7264,43 @@ impl Excerpt { && range.end > self.text_summary.len; } - fn clip_anchor(&self, text_anchor: text::Anchor) -> text::Anchor { - if text_anchor - .cmp(&self.range.context.start, &self.buffer) - .is_lt() - { + fn clip_anchor( + &self, + text_anchor: text::Anchor, + snapshot: &MultiBufferSnapshot, + ) -> text::Anchor { + let buffer = self.buffer_snapshot(snapshot); + if text_anchor.cmp(&self.range.context.start, buffer).is_lt() { self.range.context.start - } else if text_anchor - .cmp(&self.range.context.end, &self.buffer) - .is_gt() - { + } else if text_anchor.cmp(&self.range.context.end, buffer).is_gt() { self.range.context.end } else { text_anchor } } - fn contains(&self, anchor: &Anchor) -> bool { - (anchor.text_anchor.buffer_id == None - || anchor.text_anchor.buffer_id == Some(self.buffer_id)) - && self - .range - .context - .start - .cmp(&anchor.text_anchor, &self.buffer) - .is_le() + pub(crate) fn contains(&self, anchor: &ExcerptAnchor, snapshot: &MultiBufferSnapshot) -> bool { + self.path_key_index == anchor.path + && self.buffer_id == anchor.text_anchor.buffer_id && self .range - .context - .end - 
.cmp(&anchor.text_anchor, &self.buffer) - .is_ge() - } - - /// The [`Excerpt`]'s start offset in its [`Buffer`] - fn buffer_start_offset(&self) -> BufferOffset { - BufferOffset(self.range.context.start.to_offset(&self.buffer)) - } - - /// The [`Excerpt`]'s end offset in its [`Buffer`] - fn buffer_end_offset(&self) -> BufferOffset { - self.buffer_start_offset() + self.text_summary.len - } -} - -impl<'a> MultiBufferExcerpt<'a> { - pub fn id(&self) -> ExcerptId { - self.excerpt.id - } - - pub fn buffer_id(&self) -> BufferId { - self.excerpt.buffer_id - } - - pub fn start_anchor(&self) -> Anchor { - Anchor::in_buffer(self.excerpt.id, self.excerpt.range.context.start) - } - - pub fn end_anchor(&self) -> Anchor { - Anchor::in_buffer(self.excerpt.id, self.excerpt.range.context.end) - } - - pub fn buffer(&self) -> &'a BufferSnapshot { - &self.excerpt.buffer - } - - pub fn buffer_range(&self) -> Range { - self.buffer_offset - ..BufferOffset( - self.excerpt - .range - .context - .end - .to_offset(&self.excerpt.buffer.text), - ) - } - - pub fn start_offset(&self) -> MultiBufferOffset { - self.offset - } - - /// Maps an offset within the [`MultiBuffer`] to an offset within the [`Buffer`] - pub fn map_offset_to_buffer(&mut self, offset: MultiBufferOffset) -> BufferOffset { - self.map_range_to_buffer(offset..offset).start - } - - /// Maps a range within the [`MultiBuffer`] to a range within the [`Buffer`] - pub fn map_range_to_buffer(&mut self, range: Range) -> Range { - self.diff_transforms - .seek(&OutputDimension(range.start), Bias::Right); - let start = self.map_offset_to_buffer_internal(range.start); - let end = if range.end > range.start { - self.diff_transforms - .seek_forward(&OutputDimension(range.end), Bias::Right); - self.map_offset_to_buffer_internal(range.end) - } else { - start - }; - start..end - } - - fn map_offset_to_buffer_internal(&self, offset: MultiBufferOffset) -> BufferOffset { - let mut excerpt_offset = self.diff_transforms.start().excerpt_dimension; - if 
let Some(DiffTransform::BufferContent { .. }) = self.diff_transforms.item() { - excerpt_offset += offset - self.diff_transforms.start().output_dimension.0; - }; - let offset_in_excerpt = excerpt_offset.saturating_sub(self.excerpt_offset); - self.buffer_offset + offset_in_excerpt - } - - /// Map an offset within the [`Buffer`] to an offset within the [`MultiBuffer`] - pub fn map_offset_from_buffer(&mut self, buffer_offset: BufferOffset) -> MultiBufferOffset { - self.map_range_from_buffer(buffer_offset..buffer_offset) - .start - } - - /// Map a range within the [`Buffer`] to a range within the [`MultiBuffer`] - pub fn map_range_from_buffer( - &mut self, - buffer_range: Range, - ) -> Range { - if buffer_range.start < self.buffer_offset { - log::warn!( - "Attempting to map a range from a buffer offset that starts before the current buffer offset" - ); - return self.offset..self.offset; - } - let overshoot = buffer_range.start - self.buffer_offset; - let excerpt_offset = self.excerpt_offset + overshoot; - let excerpt_seek_dim = excerpt_offset; - self.diff_transforms.seek(&excerpt_seek_dim, Bias::Right); - if self.diff_transforms.start().excerpt_dimension > excerpt_offset { - log::warn!( - "Attempting to map a range from a buffer offset that starts before the current buffer offset" - ); - return self.offset..self.offset; - } - let overshoot = excerpt_offset - self.diff_transforms.start().excerpt_dimension; - let start = self.diff_transforms.start().output_dimension.0 + overshoot; - - let end = if buffer_range.start < buffer_range.end { - let overshoot = buffer_range.end - self.buffer_offset; - let excerpt_offset = self.excerpt_offset + overshoot; - let excerpt_seek_dim = excerpt_offset; - self.diff_transforms - .seek_forward(&excerpt_seek_dim, Bias::Right); - let overshoot = excerpt_offset - self.diff_transforms.start().excerpt_dimension; - // todo(lw): Clamp end to the excerpt boundaries - self.diff_transforms.start().output_dimension.0 + overshoot - } else { - start - 
}; - - start..end - } - - /// Returns true if the entirety of the given range is in the buffer's excerpt - pub fn contains_buffer_range(&self, range: Range) -> bool { - range.start >= self.excerpt.buffer_start_offset() - && range.end <= self.excerpt.buffer_end_offset() - } - - /// Returns true if any part of the given range is in the buffer's excerpt - pub fn contains_partial_buffer_range(&self, range: Range) -> bool { - range.start <= self.excerpt.buffer_end_offset() - && range.end >= self.excerpt.buffer_start_offset() - } - - pub fn max_buffer_row(&self) -> u32 { - self.excerpt.max_buffer_row - } -} - -impl ExcerptId { - pub fn min() -> Self { - Self(0) - } - - pub fn max() -> Self { - Self(u32::MAX) - } - - pub fn to_proto(self) -> u64 { - self.0 as _ + .contains(&anchor.text_anchor(), self.buffer_snapshot(snapshot)) } - pub fn from_proto(proto: u64) -> Self { - Self(proto as _) + fn start_anchor(&self) -> ExcerptAnchor { + ExcerptAnchor::in_buffer(self.path_key_index, self.range.context.start) } - pub fn cmp(&self, other: &Self, snapshot: &MultiBufferSnapshot) -> cmp::Ordering { - let a = snapshot.excerpt_locator_for_id(*self); - let b = snapshot.excerpt_locator_for_id(*other); - a.cmp(b).then_with(|| self.0.cmp(&other.0)) + fn end_anchor(&self) -> ExcerptAnchor { + ExcerptAnchor::in_buffer(self.path_key_index, self.range.context.end) } } -impl From for usize { - fn from(val: ExcerptId) -> Self { - val.0 as usize - } -} - -impl fmt::Debug for Excerpt { - fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - f.debug_struct("Excerpt") - .field("id", &self.id) - .field("locator", &self.locator) - .field("buffer_id", &self.buffer_id) - .field("range", &self.range) - .field("text_summary", &self.text_summary) - .field("has_trailing_newline", &self.has_trailing_newline) - .finish() +impl PartialEq for Excerpt { + fn eq(&self, other: &Self) -> bool { + self.path_key_index == other.path_key_index + && self.buffer_id == other.buffer_id + && self.range.context == 
other.range.context } } @@ -7998,8 +7313,8 @@ impl sum_tree::Item for Excerpt { text += TextSummary::from("\n"); } ExcerptSummary { - excerpt_id: self.id, - excerpt_locator: self.locator.clone(), + path_key: self.path_key.clone(), + max_anchor: Some(self.range.context.end), widest_line_number: self.max_buffer_row, text: text.into(), count: 1, @@ -8007,22 +7322,6 @@ impl sum_tree::Item for Excerpt { } } -impl sum_tree::Item for ExcerptIdMapping { - type Summary = ExcerptId; - - fn summary(&self, _cx: ()) -> Self::Summary { - self.id - } -} - -impl sum_tree::KeyedItem for ExcerptIdMapping { - type Key = ExcerptId; - - fn key(&self) -> Self::Key { - self.id - } -} - impl DiffTransform { fn hunk_info(&self) -> Option { match self { @@ -8071,45 +7370,98 @@ impl sum_tree::ContextLessSummary for DiffTransformSummary { } } -impl sum_tree::ContextLessSummary for ExcerptId { - fn zero() -> Self { - Self(0) +impl sum_tree::Dimension<'_, ExcerptSummary> for PathKey { + fn zero(_: ::Context<'_>) -> Self { + PathKey::min() } - fn add_summary(&mut self, summary: &Self) { - *self = cmp::max(*self, *summary); + fn add_summary( + &mut self, + summary: &'_ ExcerptSummary, + _cx: ::Context<'_>, + ) { + *self = summary.path_key.clone(); + } +} + +impl sum_tree::Dimension<'_, ExcerptSummary> for MultiBufferOffset { + fn zero(_: ::Context<'_>) -> Self { + MultiBufferOffset::ZERO + } + + fn add_summary( + &mut self, + summary: &'_ ExcerptSummary, + _cx: ::Context<'_>, + ) { + *self += summary.text.len } } impl sum_tree::ContextLessSummary for ExcerptSummary { fn zero() -> Self { - Self::default() + Self::min() } fn add_summary(&mut self, summary: &Self) { debug_assert!( - summary.excerpt_locator > self.excerpt_locator - || self.excerpt_locator == Locator::min(), - "Excerpt locators must be in ascending order: {:?} > {:?}", - summary.excerpt_locator, - self.excerpt_locator + summary.path_key >= self.path_key, + "Path keys must be in ascending order: {:?} > {:?}", + summary.path_key, + 
self.path_key ); - self.excerpt_locator = summary.excerpt_locator.clone(); + + self.path_key = summary.path_key.clone(); + self.max_anchor = summary.max_anchor; self.text += summary.text; self.widest_line_number = cmp::max(self.widest_line_number, summary.widest_line_number); self.count += summary.count; } } -impl<'a> sum_tree::SeekTarget<'a, ExcerptSummary, Option<&'a Locator>> for Locator { - fn cmp(&self, cursor_location: &Option<&'a Locator>, _: ()) -> cmp::Ordering { - Ord::cmp(&Some(self), cursor_location) +impl sum_tree::SeekTarget<'_, ExcerptSummary, ExcerptSummary> for AnchorSeekTarget { + fn cmp( + &self, + cursor_location: &ExcerptSummary, + _cx: ::Context<'_>, + ) -> cmp::Ordering { + match self { + AnchorSeekTarget::Excerpt { + path_key, + anchor, + snapshot, + } => { + let path_comparison = Ord::cmp(path_key, &cursor_location.path_key); + if path_comparison.is_ne() { + path_comparison + } else if let Some(snapshot) = snapshot { + if anchor.text_anchor.buffer_id != snapshot.remote_id() { + Ordering::Greater + } else if let Some(max_anchor) = cursor_location.max_anchor { + debug_assert_eq!(max_anchor.buffer_id, snapshot.remote_id()); + anchor.text_anchor().cmp(&max_anchor, snapshot) + } else { + Ordering::Greater + } + } else { + // shouldn't happen because we expect this buffer not to have any excerpts + // (otherwise snapshot would have been Some) + Ordering::Equal + } + } + // This should be dead code because Empty is only constructed for an empty snapshot + AnchorSeekTarget::Empty => Ordering::Equal, + } } } -impl sum_tree::SeekTarget<'_, ExcerptSummary, ExcerptSummary> for Locator { - fn cmp(&self, cursor_location: &ExcerptSummary, _: ()) -> cmp::Ordering { - Ord::cmp(self, &cursor_location.excerpt_locator) +impl sum_tree::SeekTarget<'_, ExcerptSummary, ExcerptSummary> for PathKey { + fn cmp( + &self, + cursor_location: &ExcerptSummary, + _cx: ::Context<'_>, + ) -> cmp::Ordering { + Ord::cmp(self, &cursor_location.path_key) } } @@ -8126,26 +7478,6 
@@ where } } -impl<'a> sum_tree::Dimension<'a, ExcerptSummary> for Option<&'a Locator> { - fn zero(_cx: ()) -> Self { - Default::default() - } - - fn add_summary(&mut self, summary: &'a ExcerptSummary, _: ()) { - *self = Some(&summary.excerpt_locator); - } -} - -impl<'a> sum_tree::Dimension<'a, ExcerptSummary> for Option { - fn zero(_cx: ()) -> Self { - Default::default() - } - - fn add_summary(&mut self, summary: &'a ExcerptSummary, _: ()) { - *self = Some(summary.excerpt_id); - } -} - #[derive(Copy, Clone, PartialOrd, Ord, Eq, PartialEq, Debug)] struct OutputDimension(T); @@ -8201,7 +7533,7 @@ where } } -#[derive(Copy, Clone, PartialOrd, Ord, Eq, PartialEq, Debug)] +#[derive(Copy, Clone, PartialOrd, Ord, Eq, PartialEq, Debug, Default)] struct ExcerptDimension(T); impl PartialEq for ExcerptDimension { @@ -8361,18 +7693,14 @@ impl Iterator for MultiBufferRows<'_> { .excerpts .item() .or(self.cursor.excerpts.prev_item())?; - let last_row = last_excerpt - .range - .context - .end - .to_point(&last_excerpt.buffer) - .row; + let buffer_snapshot = last_excerpt.buffer_snapshot(self.cursor.snapshot); + let last_row = last_excerpt.range.context.end.to_point(buffer_snapshot).row; let first_row = last_excerpt .range .context .start - .to_point(&last_excerpt.buffer) + .to_point(buffer_snapshot) .row; let expand_info = if self.is_singleton { @@ -8381,7 +7709,7 @@ impl Iterator for MultiBufferRows<'_> { let needs_expand_up = first_row == last_row && last_row > 0 && !region.diff_hunk_status.is_some_and(|d| d.is_deleted()); - let needs_expand_down = last_row < last_excerpt.buffer.max_point().row; + let needs_expand_down = last_row < buffer_snapshot.max_point().row; if needs_expand_up && needs_expand_down { Some(ExpandExcerptDirection::UpAndDown) @@ -8394,7 +7722,7 @@ impl Iterator for MultiBufferRows<'_> { } .map(|direction| ExpandInfo { direction, - excerpt_id: last_excerpt.id, + start_anchor: Anchor::Excerpt(last_excerpt.start_anchor()), }) }; self.point += Point::new(1, 0); @@ 
-8436,7 +7764,7 @@ impl Iterator for MultiBufferRows<'_> { } .map(|direction| ExpandInfo { direction, - excerpt_id: region.excerpt.id, + start_anchor: Anchor::Excerpt(region.excerpt.start_anchor()), }) }; @@ -8488,18 +7816,20 @@ impl<'a> MultiBufferChunks<'a> { if let Some(excerpt_chunks) = self .excerpt_chunks .as_mut() - .filter(|chunks| excerpt.id == chunks.excerpt_id) + .filter(|chunks| excerpt.end_anchor() == chunks.end) { excerpt.seek_chunks( excerpt_chunks, (self.excerpt_offset_range.start - excerpt_start) ..(self.excerpt_offset_range.end - excerpt_start), + self.snapshot, ); } else { self.excerpt_chunks = Some(excerpt.chunks_in_range( (self.excerpt_offset_range.start - excerpt_start) ..(self.excerpt_offset_range.end - excerpt_start), self.language_aware, + self.snapshot, )); } } else { @@ -8521,6 +7851,7 @@ impl<'a> MultiBufferChunks<'a> { self.excerpt_chunks = Some(excerpt.chunks_in_range( 0..(self.excerpt_offset_range.end - *self.excerpts.start()), self.language_aware, + self.snapshot, )); } } @@ -8636,7 +7967,8 @@ impl<'a> Iterator for MultiBufferChunks<'a> { } chunks } else { - let base_buffer = &find_diff_state(self.diffs, *buffer_id)?.base_text(); + let base_buffer = + &find_diff_state(&self.snapshot.diffs, *buffer_id)?.base_text(); base_buffer.chunks(base_text_start..base_text_end, self.language_aware) }; @@ -8833,12 +8165,6 @@ impl ToPoint for PointUtf16 { } } -impl From for EntityId { - fn from(id: ExcerptId) -> Self { - EntityId::from(id.0 as u64) - } -} - #[cfg(debug_assertions)] pub mod debug { use super::*; diff --git a/crates/multi_buffer/src/multi_buffer_tests.rs b/crates/multi_buffer/src/multi_buffer_tests.rs index e44a38e4abed8438bcdcbf1f2c8c55c465d98e2d..bc904d1a05488ee365ebddf36c3b30accdfb9301 100644 --- a/crates/multi_buffer/src/multi_buffer_tests.rs +++ b/crates/multi_buffer/src/multi_buffer_tests.rs @@ -77,22 +77,19 @@ fn test_buffer_point_to_anchor_at_end_of_singleton_buffer(cx: &mut App) { let buffer = cx.new(|cx| 
Buffer::local("abc", cx)); let multibuffer = cx.new(|cx| MultiBuffer::singleton(buffer.clone(), cx)); - let excerpt_id = multibuffer + let anchor = multibuffer .read(cx) - .excerpt_ids() - .into_iter() - .next() + .buffer_point_to_anchor(&buffer, Point::new(0, 3), cx) .unwrap(); - let anchor = multibuffer + let (anchor, _) = multibuffer .read(cx) - .buffer_point_to_anchor(&buffer, Point::new(0, 3), cx); + .snapshot(cx) + .anchor_to_buffer_anchor(anchor) + .unwrap(); assert_eq!( anchor, - Some(Anchor::in_buffer( - excerpt_id, - buffer.read(cx).snapshot().anchor_after(Point::new(0, 3)), - )) + buffer.read(cx).snapshot().anchor_after(Point::new(0, 3)), ); } @@ -346,7 +343,7 @@ fn test_excerpt_boundaries_and_clipping(cx: &mut App) { ); let snapshot = multibuffer.update(cx, |multibuffer, cx| { - multibuffer.remove_excerpts_for_path(PathKey::sorted(1), cx); + multibuffer.remove_excerpts(PathKey::sorted(1), cx); multibuffer.snapshot(cx) }); @@ -373,7 +370,7 @@ fn test_excerpt_boundaries_and_clipping(cx: &mut App) { boundary.row, boundary .next - .buffer + .buffer(snapshot) .text_for_range(boundary.next.range.context) .collect::(), starts_new_buffer, @@ -440,7 +437,7 @@ async fn test_diff_hunks_in_range(cx: &mut TestAppContext) { multibuffer.update(cx, |multibuffer, cx| { multibuffer.add_diff(diff, cx); - multibuffer.expand_diff_hunks(vec![Anchor::min()..Anchor::max()], cx); + multibuffer.expand_diff_hunks(vec![Anchor::Min..Anchor::Max], cx); }); assert_new_snapshot( @@ -480,7 +477,7 @@ async fn test_diff_hunks_in_range(cx: &mut TestAppContext) { ); multibuffer.update(cx, |multibuffer, cx| { - multibuffer.collapse_diff_hunks(vec![Anchor::min()..Anchor::max()], cx); + multibuffer.collapse_diff_hunks(vec![Anchor::Min..Anchor::Max], cx); }); assert_new_snapshot( @@ -521,7 +518,7 @@ async fn test_diff_hunks_in_range_query_starting_at_added_row(cx: &mut TestAppCo multibuffer.update(cx, |multibuffer, cx| { multibuffer.add_diff(diff, cx); - 
multibuffer.expand_diff_hunks(vec![Anchor::min()..Anchor::max()], cx); + multibuffer.expand_diff_hunks(vec![Anchor::Min..Anchor::Max], cx); }); assert_new_snapshot( @@ -766,12 +763,27 @@ fn test_excerpt_events(cx: &mut App) { cx.subscribe( &leader_multibuffer, move |follower, _, event, cx| match event.clone() { - Event::ExcerptsAdded { + Event::BufferRangesUpdated { buffer, - predecessor, - excerpts, - } => follower.insert_excerpts_with_ids_after(predecessor, buffer, excerpts, cx), - Event::ExcerptsRemoved { ids, .. } => follower.remove_excerpts(ids, cx), + path_key, + ranges, + } => { + let buffer_snapshot = buffer.read(cx).snapshot(); + follower.set_merged_excerpt_ranges_for_path( + path_key, + buffer, + &buffer_snapshot, + ranges, + cx, + ); + } + Event::BuffersRemoved { + removed_buffer_ids, .. + } => { + for id in removed_buffer_ids { + follower.remove_excerpts_for_buffer(id, cx); + } + } Event::Edited { .. } => { *follower_edit_event_count.write() += 1; } @@ -885,9 +897,14 @@ fn test_expand_excerpts(cx: &mut App) { drop(snapshot); multibuffer.update(cx, |multibuffer, cx| { - let line_zero = multibuffer.snapshot(cx).anchor_before(Point::new(0, 0)); + let multibuffer_snapshot = multibuffer.snapshot(cx); + let line_zero = multibuffer_snapshot.anchor_before(Point::new(0, 0)); multibuffer.expand_excerpts( - multibuffer.excerpt_ids(), + multibuffer.snapshot(cx).excerpts().map(|excerpt| { + multibuffer_snapshot + .anchor_in_excerpt(excerpt.context.start) + .unwrap() + }), 1, ExpandExcerptDirection::UpAndDown, cx, @@ -1184,16 +1201,10 @@ fn test_multibuffer_anchors(cx: &mut App) { .to_offset(&old_snapshot), MultiBufferOffset(0) ); - assert_eq!(Anchor::min().to_offset(&old_snapshot), MultiBufferOffset(0)); - assert_eq!(Anchor::min().to_offset(&old_snapshot), MultiBufferOffset(0)); - assert_eq!( - Anchor::max().to_offset(&old_snapshot), - MultiBufferOffset(10) - ); - assert_eq!( - Anchor::max().to_offset(&old_snapshot), - MultiBufferOffset(10) - ); + 
assert_eq!(Anchor::Min.to_offset(&old_snapshot), MultiBufferOffset(0)); + assert_eq!(Anchor::Min.to_offset(&old_snapshot), MultiBufferOffset(0)); + assert_eq!(Anchor::Max.to_offset(&old_snapshot), MultiBufferOffset(10)); + assert_eq!(Anchor::Max.to_offset(&old_snapshot), MultiBufferOffset(10)); buffer_1.update(cx, |buffer, cx| { buffer.edit([(0..0, "W")], None, cx); @@ -1270,153 +1281,6 @@ fn test_multibuffer_anchors(cx: &mut App) { ); } -#[gpui::test] -fn test_resolving_anchors_after_replacing_their_excerpts(cx: &mut App) { - let buffer_1 = cx.new(|cx| Buffer::local("abcd", cx)); - let buffer_2 = cx.new(|cx| Buffer::local("ABCDEFGHIJKLMNOP", cx)); - let multibuffer = cx.new(|_| MultiBuffer::new(Capability::ReadWrite)); - - // Create an insertion id in buffer 1 that doesn't exist in buffer 2. - // Add an excerpt from buffer 1 that spans this new insertion. - buffer_1.update(cx, |buffer, cx| buffer.edit([(4..4, "123")], None, cx)); - let excerpt_id_1 = multibuffer.update(cx, |multibuffer, cx| { - let buffer_1_snapshot = buffer_1.read(cx).snapshot(); - multibuffer.set_excerpt_ranges_for_path( - PathKey::sorted(0), - buffer_1, - &buffer_1_snapshot, - vec![ExcerptRange::new((0..7).to_point(&buffer_1_snapshot))], - cx, - ); - multibuffer.excerpt_ids().into_iter().next().unwrap() - }); - - let snapshot_1 = multibuffer.read(cx).snapshot(cx); - assert_eq!(snapshot_1.text(), "abcd123"); - - // Replace the buffer 1 excerpt with new excerpts from buffer 2. 
- let (excerpt_id_2, _excerpt_id_3) = multibuffer.update(cx, |multibuffer, cx| { - multibuffer.remove_excerpts_for_path(PathKey::sorted(0), cx); - let snapshot_2 = buffer_2.read(cx).snapshot(); - multibuffer.set_excerpt_ranges_for_path( - PathKey::sorted(1), - buffer_2.clone(), - &buffer_2.read(cx).snapshot(), - vec![ - ExcerptRange::new((0..4).to_point(&snapshot_2)), - ExcerptRange::new((6..10).to_point(&snapshot_2)), - ExcerptRange::new((12..16).to_point(&snapshot_2)), - ], - cx, - ); - let mut ids = multibuffer - .excerpts_for_buffer(buffer_2.read(cx).remote_id(), cx) - .into_iter() - .map(|(id, _, _)| id); - (ids.next().unwrap(), ids.next().unwrap()) - }); - let snapshot_2 = multibuffer.read(cx).snapshot(cx); - assert_eq!(snapshot_2.text(), "ABCD\nGHIJ\nMNOP"); - - // The old excerpt id doesn't get reused. - assert_ne!(excerpt_id_2, excerpt_id_1); - - // Resolve some anchors from the previous snapshot in the new snapshot. - // The current excerpts are from a different buffer, so we don't attempt to - // resolve the old text anchor in the new buffer. - assert_eq!( - snapshot_2.summary_for_anchor::( - &snapshot_1.anchor_before(MultiBufferOffset(2)) - ), - MultiBufferOffset(0) - ); - assert_eq!( - snapshot_2.summaries_for_anchors::(&[ - snapshot_1.anchor_before(MultiBufferOffset(2)), - snapshot_1.anchor_after(MultiBufferOffset(3)) - ]), - vec![MultiBufferOffset(0), MultiBufferOffset(0)] - ); - - // Refresh anchors from the old snapshot. The return value indicates that both - // anchors lost their original excerpt. - let refresh = snapshot_2.refresh_anchors(&[ - snapshot_1.anchor_before(MultiBufferOffset(2)), - snapshot_1.anchor_after(MultiBufferOffset(3)), - ]); - assert_eq!( - refresh, - &[ - (0, snapshot_2.anchor_before(MultiBufferOffset(0)), false), - (1, snapshot_2.anchor_after(MultiBufferOffset(0)), false), - ] - ); - - // Replace the middle excerpt with a smaller excerpt in buffer 2, - // that intersects the old excerpt. 
- multibuffer.update(cx, |multibuffer, cx| { - let snapshot_2 = buffer_2.read(cx).snapshot(); - multibuffer.set_excerpt_ranges_for_path( - PathKey::sorted(1), - buffer_2.clone(), - &buffer_2.read(cx).snapshot(), - vec![ - ExcerptRange::new((0..4).to_point(&snapshot_2)), - ExcerptRange::new((12..16).to_point(&snapshot_2)), - ], - cx, - ); - multibuffer.set_excerpt_ranges_for_path( - PathKey::sorted(1), - buffer_2.clone(), - &buffer_2.read(cx).snapshot(), - vec![ - ExcerptRange::new((0..4).to_point(&snapshot_2)), - ExcerptRange::new((5..8).to_point(&snapshot_2)), - ExcerptRange::new((12..16).to_point(&snapshot_2)), - ], - cx, - ); - }); - - let snapshot_3 = multibuffer.read(cx).snapshot(cx); - assert_eq!(snapshot_3.text(), "ABCD\nFGH\nMNOP"); - - // Resolve some anchors from the previous snapshot in the new snapshot. - // The third anchor can't be resolved, since its excerpt has been removed, - // so it resolves to the same position as its predecessor. - let anchors = [ - snapshot_2.anchor_before(MultiBufferOffset(0)), - snapshot_2.anchor_after(MultiBufferOffset(2)), - snapshot_2.anchor_after(MultiBufferOffset(6)), - snapshot_2.anchor_after(MultiBufferOffset(14)), - ]; - assert_eq!( - snapshot_3.summaries_for_anchors::(&anchors), - &[ - MultiBufferOffset(0), - MultiBufferOffset(2), - MultiBufferOffset(9), - MultiBufferOffset(13) - ] - ); - - let new_anchors = snapshot_3.refresh_anchors(&anchors); - assert_eq!( - new_anchors.iter().map(|a| (a.0, a.2)).collect::>(), - &[(0, true), (1, true), (2, true), (3, true)] - ); - assert_eq!( - snapshot_3.summaries_for_anchors::(new_anchors.iter().map(|a| &a.1)), - &[ - MultiBufferOffset(0), - MultiBufferOffset(2), - MultiBufferOffset(7), - MultiBufferOffset(13) - ] - ); -} - #[gpui::test] async fn test_basic_diff_hunks(cx: &mut TestAppContext) { let text = indoc!( @@ -1467,7 +1331,7 @@ async fn test_basic_diff_hunks(cx: &mut TestAppContext) { ); multibuffer.update(cx, |multibuffer, cx| { - 
multibuffer.expand_diff_hunks(vec![Anchor::min()..Anchor::max()], cx); + multibuffer.expand_diff_hunks(vec![Anchor::Min..Anchor::Max], cx); }); assert_new_snapshot( @@ -1513,7 +1377,7 @@ async fn test_basic_diff_hunks(cx: &mut TestAppContext) { assert_line_indents(&snapshot); multibuffer.update(cx, |multibuffer, cx| { - multibuffer.collapse_diff_hunks(vec![Anchor::min()..Anchor::max()], cx) + multibuffer.collapse_diff_hunks(vec![Anchor::Min..Anchor::Max], cx) }); assert_new_snapshot( &multibuffer, @@ -1700,7 +1564,7 @@ async fn test_repeatedly_expand_a_diff_hunk(cx: &mut TestAppContext) { }); multibuffer.update(cx, |multibuffer, cx| { - multibuffer.expand_diff_hunks(vec![Anchor::min()..Anchor::max()], cx); + multibuffer.expand_diff_hunks(vec![Anchor::Min..Anchor::Max], cx); }); assert_new_snapshot( @@ -1751,7 +1615,7 @@ async fn test_repeatedly_expand_a_diff_hunk(cx: &mut TestAppContext) { // Now collapse all diff hunks multibuffer.update(cx, |multibuffer, cx| { - multibuffer.collapse_diff_hunks(vec![Anchor::min()..Anchor::max()], cx); + multibuffer.collapse_diff_hunks(vec![Anchor::Min..Anchor::Max], cx); }); assert_new_snapshot( @@ -2097,6 +1961,203 @@ fn test_set_excerpts_for_buffer(cx: &mut TestAppContext) { }); } +#[gpui::test] +fn test_update_excerpt_ranges_for_path(cx: &mut TestAppContext) { + let buffer = cx.new(|cx| { + Buffer::local( + indoc! { + "row 0 + row 1 + row 2 + row 3 + row 4 + row 5 + row 6 + row 7 + row 8 + row 9 + row 10 + row 11 + row 12 + row 13 + row 14 + "}, + cx, + ) + }); + let path = PathKey::with_sort_prefix(0, rel_path("test.rs").into_arc()); + + let multibuffer = cx.new(|_| MultiBuffer::new(Capability::ReadWrite)); + multibuffer.update(cx, |multibuffer, cx| { + multibuffer.set_excerpts_for_path( + path.clone(), + buffer.clone(), + vec![Point::row_range(2..4), Point::row_range(8..10)], + 0, + cx, + ); + }); + assert_excerpts_match( + &multibuffer, + cx, + indoc! 
{"----- + row 2 + row 3 + row 4 + ----- + row 8 + row 9 + row 10 + "}, + ); + + multibuffer.update(cx, |multibuffer, cx| { + multibuffer.update_excerpts_for_path( + path.clone(), + buffer.clone(), + vec![Point::row_range(12..13)], + 0, + cx, + ); + }); + assert_excerpts_match( + &multibuffer, + cx, + indoc! {"----- + row 12 + row 13 + "}, + ); + + multibuffer.update(cx, |multibuffer, cx| { + multibuffer.set_excerpts_for_path( + path.clone(), + buffer.clone(), + vec![Point::row_range(2..4)], + 0, + cx, + ); + }); + assert_excerpts_match( + &multibuffer, + cx, + indoc! {"----- + row 2 + row 3 + row 4 + "}, + ); + multibuffer.update(cx, |multibuffer, cx| { + multibuffer.update_excerpts_for_path( + path.clone(), + buffer.clone(), + vec![Point::row_range(3..5)], + 0, + cx, + ); + }); + assert_excerpts_match( + &multibuffer, + cx, + indoc! {"----- + row 2 + row 3 + row 4 + row 5 + "}, + ); + + multibuffer.update(cx, |multibuffer, cx| { + multibuffer.set_excerpts_for_path( + path.clone(), + buffer.clone(), + vec![ + Point::row_range(0..1), + Point::row_range(6..8), + Point::row_range(12..13), + ], + 0, + cx, + ); + }); + assert_excerpts_match( + &multibuffer, + cx, + indoc! {"----- + row 0 + row 1 + ----- + row 6 + row 7 + row 8 + ----- + row 12 + row 13 + "}, + ); + multibuffer.update(cx, |multibuffer, cx| { + multibuffer.update_excerpts_for_path( + path.clone(), + buffer.clone(), + vec![Point::row_range(7..9)], + 0, + cx, + ); + }); + assert_excerpts_match( + &multibuffer, + cx, + indoc! {"----- + row 6 + row 7 + row 8 + row 9 + "}, + ); + + multibuffer.update(cx, |multibuffer, cx| { + multibuffer.set_excerpts_for_path( + path.clone(), + buffer.clone(), + vec![Point::row_range(2..3), Point::row_range(6..7)], + 0, + cx, + ); + }); + assert_excerpts_match( + &multibuffer, + cx, + indoc! 
{"----- + row 2 + row 3 + ----- + row 6 + row 7 + "}, + ); + multibuffer.update(cx, |multibuffer, cx| { + multibuffer.update_excerpts_for_path( + path.clone(), + buffer.clone(), + vec![Point::row_range(3..6)], + 0, + cx, + ); + }); + assert_excerpts_match( + &multibuffer, + cx, + indoc! {"----- + row 2 + row 3 + row 4 + row 5 + row 6 + row 7 + "}, + ); +} + #[gpui::test] fn test_set_excerpts_for_buffer_rename(cx: &mut TestAppContext) { let buf1 = cx.new(|cx| { @@ -2179,19 +2240,418 @@ fn test_set_excerpts_for_buffer_rename(cx: &mut TestAppContext) { } #[gpui::test] -async fn test_diff_hunks_with_multiple_excerpts(cx: &mut TestAppContext) { - let base_text_1 = indoc!( - " - one - two +fn test_set_excerpts_for_path_replaces_previous_buffer(cx: &mut TestAppContext) { + let buffer_a = cx.new(|cx| { + Buffer::local( + indoc! { + "alpha + beta + gamma + delta + epsilon + ", + }, + cx, + ) + }); + let buffer_b = cx.new(|cx| { + Buffer::local( + indoc! { + "one + two three - four - five - six + four + ", + }, + cx, + ) + }); + let path: PathKey = PathKey::with_sort_prefix(0, rel_path("shared/path").into_arc()); + + let multibuffer = cx.new(|_| MultiBuffer::new(Capability::ReadWrite)); + let removed_buffer_ids: Arc>> = Default::default(); + multibuffer.update(cx, |_, cx| { + let removed_buffer_ids = removed_buffer_ids.clone(); + cx.subscribe(&multibuffer, move |_, _, event, _| { + if let Event::BuffersRemoved { + removed_buffer_ids: ids, + } = event + { + removed_buffer_ids.write().extend(ids.iter().copied()); + } + }) + .detach(); + }); + + let ranges_a = vec![Point::row_range(0..1), Point::row_range(3..4)]; + multibuffer.update(cx, |multibuffer, cx| { + multibuffer.set_excerpts_for_path(path.clone(), buffer_a.clone(), ranges_a.clone(), 0, cx); + }); + let (anchor_a1, anchor_a2) = multibuffer.read_with(cx, |multibuffer, cx| { + let snapshot = multibuffer.snapshot(cx); + let buffer_snapshot = buffer_a.read(cx).snapshot(); + let mut anchors = 
ranges_a.into_iter().filter_map(|range| { + let text_range = buffer_snapshot.anchor_range_inside(range); + let start = snapshot.anchor_in_buffer(text_range.start)?; + let end = snapshot.anchor_in_buffer(text_range.end)?; + Some(start..end) + }); + ( + anchors.next().expect("should have first anchor"), + anchors.next().expect("should have second anchor"), + ) + }); + + assert_excerpts_match( + &multibuffer, + cx, + indoc! { + "----- + alpha + beta + ----- + delta + epsilon " + }, ); - let text_1 = indoc!( - " + + let buffer_a_id = buffer_a.read_with(cx, |buffer, _| buffer.remote_id()); + multibuffer.read_with(cx, |multibuffer, cx| { + let snapshot = multibuffer.snapshot(cx); + assert!( + snapshot + .excerpts() + .any(|excerpt| excerpt.context.start.buffer_id == buffer_a_id), + ); + }); + + let ranges_b = vec![Point::row_range(1..2)]; + multibuffer.update(cx, |multibuffer, cx| { + multibuffer.set_excerpts_for_path(path.clone(), buffer_b.clone(), ranges_b.clone(), 1, cx); + }); + let anchor_b = multibuffer.read_with(cx, |multibuffer, cx| { + let snapshot = multibuffer.snapshot(cx); + let buffer_snapshot = buffer_b.read(cx).snapshot(); + ranges_b + .into_iter() + .filter_map(|range| { + let text_range = buffer_snapshot.anchor_range_inside(range); + let start = snapshot.anchor_in_buffer(text_range.start)?; + let end = snapshot.anchor_in_buffer(text_range.end)?; + Some(start..end) + }) + .next() + .expect("should have an anchor") + }); + + let buffer_b_id = buffer_b.read_with(cx, |buffer, _| buffer.remote_id()); + multibuffer.read_with(cx, |multibuffer, cx| { + let snapshot = multibuffer.snapshot(cx); + assert!( + !snapshot + .excerpts() + .any(|excerpt| excerpt.context.start.buffer_id == buffer_a_id), + ); + assert!( + snapshot + .excerpts() + .any(|excerpt| excerpt.context.start.buffer_id == buffer_b_id), + ); + assert!( + multibuffer.buffer(buffer_a_id).is_none(), + "old buffer should be fully removed from the multibuffer" + ); + assert!( + 
multibuffer.buffer(buffer_b_id).is_some(), + "new buffer should be present in the multibuffer" + ); + }); + assert!( + removed_buffer_ids.read().contains(&buffer_a_id), + "BuffersRemoved event should have been emitted for the old buffer" + ); + + assert_excerpts_match( + &multibuffer, + cx, + indoc! { + "----- + one + two + three + four + " + }, + ); + + multibuffer.read_with(cx, |multibuffer, cx| { + let snapshot = multibuffer.snapshot(cx); + anchor_a1.start.cmp(&anchor_b.start, &snapshot); + anchor_a1.end.cmp(&anchor_b.end, &snapshot); + anchor_a1.start.cmp(&anchor_a2.start, &snapshot); + anchor_a1.end.cmp(&anchor_a2.end, &snapshot); + }); +} + +#[gpui::test] +fn test_stale_anchor_after_buffer_removal_and_path_reuse(cx: &mut TestAppContext) { + let buffer_a = cx.new(|cx| Buffer::local("aaa\nbbb\nccc\n", cx)); + let buffer_b = cx.new(|cx| Buffer::local("xxx\nyyy\nzzz\n", cx)); + let buffer_other = cx.new(|cx| Buffer::local("111\n222\n333\n", cx)); + let path = PathKey::with_sort_prefix(0, rel_path("the/path").into_arc()); + let other_path = PathKey::with_sort_prefix(1, rel_path("other/path").into_arc()); + + let multibuffer = cx.new(|_| MultiBuffer::new(Capability::ReadWrite)); + + multibuffer.update(cx, |multibuffer, cx| { + multibuffer.set_excerpts_for_path( + path.clone(), + buffer_a.clone(), + [Point::new(0, 0)..Point::new(2, 3)], + 0, + cx, + ); + multibuffer.set_excerpts_for_path( + other_path.clone(), + buffer_other.clone(), + [Point::new(0, 0)..Point::new(2, 3)], + 0, + cx, + ); + }); + + buffer_a.update(cx, |buffer, cx| { + buffer.edit( + [(Point::new(1, 0)..Point::new(1, 0), "INSERTED ")], + None, + cx, + ); + }); + + let stale_anchor = multibuffer.read_with(cx, |multibuffer, cx| { + let snapshot = multibuffer.snapshot(cx); + snapshot.anchor_before(Point::new(1, 5)) + }); + + multibuffer.update(cx, |multibuffer, cx| { + multibuffer.remove_excerpts(path.clone(), cx); + }); + + multibuffer.read_with(cx, |multibuffer, cx| { + let snapshot = 
multibuffer.snapshot(cx); + let offset = stale_anchor.to_offset(&snapshot); + assert!( + offset.0 <= snapshot.len().0, + "stale anchor resolved to offset {offset:?} but multibuffer len is {:?}", + snapshot.len() + ); + }); + + multibuffer.update(cx, |multibuffer, cx| { + multibuffer.set_excerpts_for_path( + path.clone(), + buffer_b.clone(), + [Point::new(0, 0)..Point::new(2, 3)], + 0, + cx, + ); + }); + + multibuffer.read_with(cx, |multibuffer, cx| { + let snapshot = multibuffer.snapshot(cx); + let offset = stale_anchor.to_offset(&snapshot); + assert!( + offset.0 <= snapshot.len().0, + "stale anchor resolved to offset {offset:?} but multibuffer len is {:?}", + snapshot.len() + ); + }); +} + +#[gpui::test] +async fn test_map_excerpt_ranges(cx: &mut TestAppContext) { + let base_text = indoc!( + " + { + (aaa) + (bbb) + (ccc) + } + xxx + yyy + zzz + [ + (ddd) + (EEE) + ] + " + ); + let text = indoc!( + " + { + (aaa) + (CCC) + } + xxx + yyy + zzz + [ + (ddd) + (EEE) + ] + " + ); + + let buffer = cx.new(|cx| Buffer::local(text, cx)); + let diff = cx + .new(|cx| BufferDiff::new_with_base_text(base_text, &buffer.read(cx).text_snapshot(), cx)); + cx.run_until_parked(); + + let multibuffer = cx.new(|cx| { + let mut multibuffer = MultiBuffer::new(Capability::ReadWrite); + multibuffer.set_excerpts_for_path( + PathKey::sorted(0), + buffer.clone(), + [ + Point::new(0, 0)..Point::new(3, 1), + Point::new(7, 0)..Point::new(10, 1), + ], + 0, + cx, + ); + multibuffer.add_diff(diff.clone(), cx); + multibuffer + }); + + multibuffer.update(cx, |multibuffer, cx| { + multibuffer.expand_diff_hunks(vec![Anchor::Min..Anchor::Max], cx); + }); + cx.run_until_parked(); + + let snapshot = multibuffer.read_with(cx, |multibuffer, cx| multibuffer.snapshot(cx)); + + let actual_diff = format_diff( + &snapshot.text(), + &snapshot.row_infos(MultiBufferRow(0)).collect::>(), + &Default::default(), + None, + ); + pretty_assertions::assert_eq!( + actual_diff, + indoc!( + " + { + (aaa) + - (bbb) + - (ccc) + 
+ (CCC) + } [\u{2193}] + [ [\u{2191}] + (ddd) + (EEE) + ] [\u{2193}]" + ) + ); + + assert_eq!( + snapshot.map_excerpt_ranges( + snapshot.point_to_offset(Point::new(1, 3))..snapshot.point_to_offset(Point::new(1, 3)), + |buffer, excerpt_range, input_range| { + assert_eq!( + buffer.offset_to_point(input_range.start.0) + ..buffer.offset_to_point(input_range.end.0), + Point::new(1, 3)..Point::new(1, 3), + ); + assert_eq!( + buffer.offset_to_point(excerpt_range.context.start.0) + ..buffer.offset_to_point(excerpt_range.context.end.0), + Point::new(0, 0)..Point::new(3, 1), + ); + vec![ + (input_range.start..BufferOffset(input_range.start.0 + 3), ()), + (excerpt_range.context, ()), + ( + BufferOffset(text::ToOffset::to_offset(&Point::new(2, 2), buffer)) + ..BufferOffset(text::ToOffset::to_offset(&Point::new(2, 7), buffer)), + (), + ), + ( + BufferOffset(text::ToOffset::to_offset(&Point::new(0, 0), buffer)) + ..BufferOffset(text::ToOffset::to_offset(&Point::new(2, 0), buffer)), + (), + ), + ] + }, + ), + Some(vec![ + ( + snapshot.point_to_offset(Point::new(1, 3)) + ..snapshot.point_to_offset(Point::new(1, 6)), + (), + ), + ( + snapshot.point_to_offset(Point::zero())..snapshot.point_to_offset(Point::new(5, 1)), + () + ), + ( + snapshot.point_to_offset(Point::new(4, 2)) + ..snapshot.point_to_offset(Point::new(4, 7)), + (), + ), + ( + snapshot.point_to_offset(Point::zero())..snapshot.point_to_offset(Point::new(4, 0)), + () + ), + ]), + ); + + assert_eq!( + snapshot.map_excerpt_ranges( + snapshot.point_to_offset(Point::new(5, 0))..snapshot.point_to_offset(Point::new(7, 0)), + |_, _, range| vec![(range, ())], + ), + None, + ); + + assert_eq!( + snapshot.map_excerpt_ranges( + snapshot.point_to_offset(Point::new(7, 3))..snapshot.point_to_offset(Point::new(7, 6)), + |buffer, excerpt_range, input_range| { + assert_eq!( + buffer.offset_to_point(input_range.start.0) + ..buffer.offset_to_point(input_range.end.0), + Point::new(8, 3)..Point::new(8, 6), + ); + assert_eq!( + 
buffer.offset_to_point(excerpt_range.context.start.0) + ..buffer.offset_to_point(excerpt_range.context.end.0), + Point::new(7, 0)..Point::new(10, 1), + ); + vec![(input_range, ())] + }, + ), + Some(vec![( + snapshot.point_to_offset(Point::new(7, 3))..snapshot.point_to_offset(Point::new(7, 6)), + (), + )]), + ); +} + +#[gpui::test] +async fn test_diff_hunks_with_multiple_excerpts(cx: &mut TestAppContext) { + let base_text_1 = indoc!( + " + one + two + three + four + five + six + " + ); + let text_1 = indoc!( + " ZERO one TWO @@ -2273,7 +2733,7 @@ async fn test_diff_hunks_with_multiple_excerpts(cx: &mut TestAppContext) { ); multibuffer.update(cx, |multibuffer, cx| { - multibuffer.expand_diff_hunks(vec![Anchor::min()..Anchor::max()], cx); + multibuffer.expand_diff_hunks(vec![Anchor::Min..Anchor::Max], cx); }); assert_new_snapshot( @@ -2423,101 +2883,137 @@ struct ReferenceMultibuffer { excerpts: Vec, diffs: HashMap>, inverted_diffs: HashMap, Entity)>, + expanded_diff_hunks_by_buffer: HashMap>, } -#[derive(Debug)] +#[derive(Clone, Debug)] struct ReferenceExcerpt { - id: ExcerptId, + path_key: PathKey, + path_key_index: PathKeyIndex, buffer: Entity, range: Range, - expanded_diff_hunks: Vec, } -#[derive(Debug)] +#[derive(Clone, Debug)] struct ReferenceRegion { buffer_id: Option, range: Range, - buffer_range: Option>, + buffer_range: Range, + // if this is a deleted hunk, the main buffer anchor to which the deleted content is attached + deleted_hunk_anchor: Option, status: Option, - excerpt_id: Option, + excerpt: Option, } impl ReferenceMultibuffer { - fn expand_excerpts(&mut self, excerpts: &HashSet, line_count: u32, cx: &App) { - if line_count == 0 { + fn expand_excerpts( + &mut self, + excerpts: &HashSet>, + line_count: u32, + cx: &mut App, + ) { + use text::AnchorRangeExt as _; + + if line_count == 0 || excerpts.is_empty() { return; } - for id in excerpts { - let excerpt = self.excerpts.iter_mut().find(|e| e.id == *id).unwrap(); - let snapshot = 
excerpt.buffer.read(cx).snapshot(); - let mut point_range = excerpt.range.to_point(&snapshot); - point_range.start = Point::new(point_range.start.row.saturating_sub(line_count), 0); - point_range.end = - snapshot.clip_point(Point::new(point_range.end.row + line_count, 0), Bias::Left); - point_range.end.column = snapshot.line_len(point_range.end.row); - excerpt.range = - snapshot.anchor_before(point_range.start)..snapshot.anchor_after(point_range.end); + let mut excerpts_by_buffer: HashMap>> = + HashMap::default(); + for excerpt in excerpts { + excerpts_by_buffer + .entry(excerpt.context.start.buffer_id) + .or_default() + .push(excerpt.clone()) } - } - fn remove_excerpt(&mut self, id: ExcerptId, cx: &App) { - let ix = self - .excerpts - .iter() - .position(|excerpt| excerpt.id == id) - .unwrap(); - let excerpt = self.excerpts.remove(ix); - let buffer = excerpt.buffer.read(cx); - let buffer_id = buffer.remote_id(); - log::info!( - "Removing excerpt {}: {:?}", - ix, - buffer - .text_for_range(excerpt.range.to_offset(buffer)) - .collect::(), - ); - if !self - .excerpts - .iter() - .any(|excerpt| excerpt.buffer.read(cx).remote_id() == buffer_id) - { - self.diffs.remove(&buffer_id); - self.inverted_diffs.remove(&buffer_id); + for (buffer_id, excerpts_to_expand) in excerpts_by_buffer { + let mut buffer = None; + let mut buffer_snapshot = None; + let mut path = None; + let mut path_key_index = None; + let mut new_ranges = + self.excerpts + .iter() + .filter(|excerpt| excerpt.range.start.buffer_id == buffer_id) + .map(|excerpt| { + let snapshot = excerpt.buffer.read(cx).snapshot(); + let mut range = excerpt.range.to_point(&snapshot); + if excerpts_to_expand.iter().any(|info| { + excerpt.range.contains_anchor(info.context.start, &snapshot) + }) { + range.start = Point::new(range.start.row.saturating_sub(line_count), 0); + range.end = snapshot + .clip_point(Point::new(range.end.row + line_count, 0), Bias::Left); + range.end.column = snapshot.line_len(range.end.row); + } + 
buffer = Some(excerpt.buffer.clone()); + buffer_snapshot = Some(snapshot); + path = Some(excerpt.path_key.clone()); + path_key_index = Some(excerpt.path_key_index); + ExcerptRange::new(range) + }) + .collect::>(); + + new_ranges.sort_by(|l, r| l.context.start.cmp(&r.context.start)); + + self.set_excerpts( + path.unwrap(), + path_key_index.unwrap(), + buffer.unwrap(), + &buffer_snapshot.unwrap(), + new_ranges, + cx, + ); } } - fn insert_excerpt_after( + fn set_excerpts( &mut self, - prev_id: ExcerptId, - new_excerpt_id: ExcerptId, - (buffer_handle, anchor_range): (Entity, Range), + path_key: PathKey, + path_key_index: PathKeyIndex, + buffer: Entity, + buffer_snapshot: &BufferSnapshot, + ranges: Vec>, + cx: &mut App, ) { - let excerpt_ix = if prev_id == ExcerptId::max() { - self.excerpts.len() - } else { - self.excerpts - .iter() - .position(|excerpt| excerpt.id == prev_id) - .unwrap() - + 1 - }; - self.excerpts.insert( - excerpt_ix, - ReferenceExcerpt { - id: new_excerpt_id, - buffer: buffer_handle, - range: anchor_range, - expanded_diff_hunks: Vec::new(), - }, + self.excerpts.retain(|excerpt| { + excerpt.path_key != path_key && excerpt.buffer.entity_id() != buffer.entity_id() + }); + + let ranges = MultiBuffer::merge_excerpt_ranges(&ranges); + + let (Ok(ix) | Err(ix)) = self + .excerpts + .binary_search_by(|probe| probe.path_key.cmp(&path_key)); + self.excerpts.splice( + ix..ix, + ranges.into_iter().map(|range| ReferenceExcerpt { + path_key: path_key.clone(), + path_key_index, + buffer: buffer.clone(), + range: buffer_snapshot.anchor_before(range.context.start) + ..buffer_snapshot.anchor_after(range.context.end), + }), ); + self.update_expanded_diff_hunks_for_buffer(buffer_snapshot.remote_id(), cx); } - fn expand_diff_hunks(&mut self, excerpt_id: ExcerptId, range: Range, cx: &App) { + fn expand_diff_hunks(&mut self, path_key: PathKey, range: Range, cx: &App) { let excerpt = self .excerpts .iter_mut() - .find(|e| e.id == excerpt_id) + .find(|e| { + e.path_key == 
path_key + && e.range + .start + .cmp(&range.start, &e.buffer.read(cx).snapshot()) + .is_le() + && e.range + .end + .cmp(&range.end, &e.buffer.read(cx).snapshot()) + .is_ge() + }) .unwrap(); let buffer = excerpt.buffer.read(cx).snapshot(); let buffer_id = buffer.remote_id(); @@ -2530,36 +3026,47 @@ impl ReferenceMultibuffer { let Some(diff) = self.diffs.get(&buffer_id) else { return; }; - let excerpt_range = excerpt.range.to_offset(&buffer); + let excerpt_range = excerpt.range.to_point(&buffer); + let expanded_diff_hunks = self + .expanded_diff_hunks_by_buffer + .entry(buffer_id) + .or_default(); for hunk in diff .read(cx) .snapshot(cx) .hunks_intersecting_range(range, &buffer) { - let hunk_range = hunk.buffer_range.to_offset(&buffer); + let hunk_range = hunk.buffer_range.to_point(&buffer); if hunk_range.start < excerpt_range.start || hunk_range.start > excerpt_range.end { continue; } - if let Err(ix) = excerpt - .expanded_diff_hunks + if let Err(ix) = expanded_diff_hunks .binary_search_by(|anchor| anchor.cmp(&hunk.buffer_range.start, &buffer)) { log::info!( - "expanding diff hunk {:?}. excerpt:{:?}, excerpt range:{:?}", + "expanding diff hunk {:?}. 
excerpt range: {:?}, buffer {:?}", hunk_range, - excerpt_id, - excerpt_range + excerpt_range, + buffer.remote_id() ); - excerpt - .expanded_diff_hunks - .insert(ix, hunk.buffer_range.start); + expanded_diff_hunks.insert(ix, hunk.buffer_range.start); } else { - log::trace!("hunk {hunk_range:?} already expanded in excerpt {excerpt_id:?}"); + log::trace!("hunk {hunk_range:?} already expanded in excerpt"); } } } - fn expected_content(&self, cx: &App) -> (String, Vec, HashSet) { + fn expected_content( + &self, + cx: &App, + ) -> ( + String, + Vec, + HashSet, + Vec, + ) { + use util::maybe; + let mut text = String::new(); let mut regions = Vec::::new(); let mut excerpt_boundary_rows = HashSet::default(); @@ -2595,11 +3102,10 @@ impl ReferenceMultibuffer { regions.push(ReferenceRegion { buffer_id: Some(buffer_id), range: len..text.len(), - buffer_range: Some( - (offset..hunk_base_range.start).to_point(&buffer), - ), + buffer_range: (offset..hunk_base_range.start).to_point(&buffer), status: None, - excerpt_id: Some(excerpt.id), + excerpt: Some(excerpt.clone()), + deleted_hunk_anchor: None, }); } } @@ -2611,9 +3117,10 @@ impl ReferenceMultibuffer { regions.push(ReferenceRegion { buffer_id: Some(buffer_id), range: len..text.len(), - buffer_range: Some(hunk_base_range.to_point(&buffer)), + buffer_range: hunk_base_range.to_point(&buffer), status: Some(DiffHunkStatus::deleted(hunk.secondary_status)), - excerpt_id: Some(excerpt.id), + excerpt: Some(excerpt.clone()), + deleted_hunk_anchor: None, }); } @@ -2627,9 +3134,10 @@ impl ReferenceMultibuffer { regions.push(ReferenceRegion { buffer_id: Some(buffer_id), range: len..text.len(), - buffer_range: Some((offset..buffer_range.end).to_point(&buffer)), + buffer_range: (offset..buffer_range.end).to_point(&buffer), status: None, - excerpt_id: Some(excerpt.id), + excerpt: Some(excerpt.clone()), + deleted_hunk_anchor: None, }); } else { let diff = self.diffs.get(&buffer_id).unwrap().read(cx).snapshot(cx); @@ -2651,10 +3159,18 @@ impl 
ReferenceMultibuffer { continue; } - if !excerpt.expanded_diff_hunks.iter().any(|expanded_anchor| { - expanded_anchor.to_offset(buffer).max(buffer_range.start) - == hunk_range.start.max(buffer_range.start) - }) { + if !self + .expanded_diff_hunks_by_buffer + .get(&buffer_id) + .cloned() + .into_iter() + .flatten() + .any(|expanded_anchor| { + expanded_anchor + .cmp(&hunk.buffer_range.start, buffer) + .is_eq() + }) + { log::trace!("skipping a hunk that's not marked as expanded"); continue; } @@ -2672,9 +3188,10 @@ impl ReferenceMultibuffer { regions.push(ReferenceRegion { buffer_id: Some(buffer_id), range: len..text.len(), - buffer_range: Some((offset..hunk_range.start).to_point(&buffer)), + buffer_range: (offset..hunk_range.start).to_point(&buffer), status: None, - excerpt_id: Some(excerpt.id), + excerpt: Some(excerpt.clone()), + deleted_hunk_anchor: None, }); } @@ -2691,11 +3208,10 @@ impl ReferenceMultibuffer { regions.push(ReferenceRegion { buffer_id: Some(base_buffer.remote_id()), range: len..text.len(), - buffer_range: Some( - hunk.diff_base_byte_range.to_point(&base_buffer), - ), + buffer_range: hunk.diff_base_byte_range.to_point(&base_buffer), status: Some(DiffHunkStatus::deleted(hunk.secondary_status)), - excerpt_id: Some(excerpt.id), + excerpt: Some(excerpt.clone()), + deleted_hunk_anchor: Some(hunk.buffer_range.start), }); } @@ -2710,9 +3226,10 @@ impl ReferenceMultibuffer { let region = ReferenceRegion { buffer_id: Some(buffer_id), range, - buffer_range: Some((offset..hunk_range.end).to_point(&buffer)), + buffer_range: (offset..hunk_range.end).to_point(&buffer), status: Some(DiffHunkStatus::added(hunk.secondary_status)), - excerpt_id: Some(excerpt.id), + excerpt: Some(excerpt.clone()), + deleted_hunk_anchor: None, }; offset = hunk_range.end; regions.push(region); @@ -2726,9 +3243,10 @@ impl ReferenceMultibuffer { regions.push(ReferenceRegion { buffer_id: Some(buffer_id), range: len..text.len(), - buffer_range: 
Some((offset..buffer_range.end).to_point(&buffer)), + buffer_range: (offset..buffer_range.end).to_point(&buffer), status: None, - excerpt_id: Some(excerpt.id), + excerpt: Some(excerpt.clone()), + deleted_hunk_anchor: None, }); } } @@ -2738,12 +3256,16 @@ impl ReferenceMultibuffer { regions.push(ReferenceRegion { buffer_id: None, range: 0..1, - buffer_range: Some(Point::new(0, 0)..Point::new(0, 1)), + buffer_range: Point::new(0, 0)..Point::new(0, 1), status: None, - excerpt_id: None, + excerpt: None, + deleted_hunk_anchor: None, }); } else { text.pop(); + let region = regions.last_mut().unwrap(); + assert!(region.deleted_hunk_anchor.is_none()); + region.range.end -= 1; } // Retrieve the row info using the region that contains @@ -2754,37 +3276,38 @@ impl ReferenceMultibuffer { .map(|line| { let row_info = regions .iter() - .position(|region| region.range.contains(&ix)) + .rposition(|region| { + region.range.contains(&ix) || (ix == text.len() && ix == region.range.end) + }) .map_or(RowInfo::default(), |region_ix| { - let region = ®ions[region_ix]; - let buffer_row = region.buffer_range.as_ref().map(|buffer_range| { - buffer_range.start.row - + text[region.range.start..ix].matches('\n').count() as u32 - }); - let main_buffer = self - .excerpts - .iter() - .find(|e| e.id == region.excerpt_id.unwrap()) - .map(|e| e.buffer.clone()); + let region = regions[region_ix].clone(); + let buffer_row = region.buffer_range.start.row + + text[region.range.start..ix].matches('\n').count() as u32; + let main_buffer = region.excerpt.as_ref().map(|e| e.buffer.clone()); + let excerpt_range = region.excerpt.as_ref().map(|e| &e.range); let is_excerpt_start = region_ix == 0 - || ®ions[region_ix - 1].excerpt_id != ®ion.excerpt_id + || regions[region_ix - 1].excerpt.as_ref().map(|e| &e.range) + != excerpt_range || regions[region_ix - 1].range.is_empty(); let mut is_excerpt_end = region_ix == regions.len() - 1 - || ®ions[region_ix + 1].excerpt_id != ®ion.excerpt_id; + || regions[region_ix + 
1].excerpt.as_ref().map(|e| &e.range) + != excerpt_range; let is_start = !text[region.range.start..ix].contains('\n'); + let is_last_region = region_ix == regions.len() - 1; let mut is_end = if region.range.end > text.len() { !text[ix..].contains('\n') } else { - text[ix..region.range.end.min(text.len())] + let remaining_newlines = text[ix..region.range.end.min(text.len())] .matches('\n') - .count() - == 1 + .count(); + remaining_newlines == if is_last_region { 0 } else { 1 } }; if region_ix < regions.len() - 1 && !text[ix..].contains("\n") && (region.status == Some(DiffHunkStatus::added_none()) || region.status.is_some_and(|s| s.is_deleted())) - && regions[region_ix + 1].excerpt_id == region.excerpt_id + && regions[region_ix + 1].excerpt.as_ref().map(|e| &e.range) + == excerpt_range && regions[region_ix + 1].range.start == text.len() { is_end = true; @@ -2794,7 +3317,6 @@ impl ReferenceMultibuffer { MultiBufferRow(text[..ix].matches('\n').count() as u32); let mut expand_direction = None; if let Some(buffer) = &main_buffer { - let buffer_row = buffer_row.unwrap(); let needs_expand_up = is_excerpt_start && is_start && buffer_row > 0; let needs_expand_down = is_excerpt_end && is_end @@ -2812,16 +3334,21 @@ impl ReferenceMultibuffer { RowInfo { buffer_id: region.buffer_id, diff_status: region.status, - buffer_row, + buffer_row: Some(buffer_row), wrapped_buffer_row: None, multibuffer_row: Some(multibuffer_row), - expand_info: expand_direction.zip(region.excerpt_id).map( - |(direction, excerpt_id)| ExpandInfo { + expand_info: maybe!({ + let direction = expand_direction?; + let excerpt = region.excerpt.as_ref()?; + Some(ExpandInfo { direction, - excerpt_id, - }, - ), + start_anchor: Anchor::in_buffer( + excerpt.path_key_index, + excerpt.range.start, + ), + }) + }), } }); ix += line.len() + 1; @@ -2829,61 +3356,159 @@ impl ReferenceMultibuffer { }) .collect(); - (text, row_infos, excerpt_boundary_rows) + (text, row_infos, excerpt_boundary_rows, regions) } - fn 
diffs_updated(&mut self, cx: &App) { - for excerpt in &mut self.excerpts { - let buffer = excerpt.buffer.read(cx).snapshot(); - let buffer_id = buffer.remote_id(); + fn diffs_updated(&mut self, cx: &mut App) { + let buffer_ids = self.diffs.keys().copied().collect::>(); + for buffer_id in buffer_ids { + self.update_expanded_diff_hunks_for_buffer(buffer_id, cx); + } + } - // Skip inverted diff excerpts - hunks are always expanded - if self.inverted_diffs.contains_key(&buffer_id) { - continue; - } + fn add_diff(&mut self, diff: Entity, cx: &mut App) { + let buffer_id = diff.read(cx).buffer_id; + self.diffs.insert(buffer_id, diff); + } - let excerpt_range = excerpt.range.to_offset(&buffer); - let Some(diff) = self.diffs.get(&buffer_id) else { - continue; - }; - let diff = diff.read(cx).snapshot(cx); - let mut hunks = diff.hunks_in_row_range(0..u32::MAX, &buffer).peekable(); - excerpt.expanded_diff_hunks.retain(|hunk_anchor| { - if !hunk_anchor.is_valid(&buffer) { + fn add_inverted_diff( + &mut self, + diff: Entity, + main_buffer: Entity, + cx: &App, + ) { + let base_text_buffer_id = diff.read(cx).base_text(cx).remote_id(); + self.inverted_diffs + .insert(base_text_buffer_id, (diff, main_buffer)); + } + + fn update_expanded_diff_hunks_for_buffer(&mut self, buffer_id: BufferId, cx: &mut App) { + let excerpts = self + .excerpts + .iter() + .filter(|excerpt| excerpt.buffer.read(cx).remote_id() == buffer_id) + .collect::>(); + let Some(buffer) = excerpts.first().map(|excerpt| excerpt.buffer.clone()) else { + self.expanded_diff_hunks_by_buffer.remove(&buffer_id); + return; + }; + let buffer_snapshot = buffer.read(cx).snapshot(); + let Some(diff) = self.diffs.get(&buffer_id) else { + self.expanded_diff_hunks_by_buffer.remove(&buffer_id); + return; + }; + let diff = diff.read(cx).snapshot(cx); + let hunks = diff + .hunks_in_row_range(0..u32::MAX, &buffer_snapshot) + .collect::>(); + self.expanded_diff_hunks_by_buffer + .entry(buffer_id) + .or_default() + .retain(|hunk_anchor| 
{ + if !hunk_anchor.is_valid(&buffer_snapshot) { return false; } - while let Some(hunk) = hunks.peek() { - match hunk.buffer_range.start.cmp(hunk_anchor, &buffer) { - cmp::Ordering::Less => { - hunks.next(); - } - cmp::Ordering::Equal => { - let hunk_range = hunk.buffer_range.to_offset(&buffer); - return hunk_range.end >= excerpt_range.start - && hunk_range.start <= excerpt_range.end; - } - cmp::Ordering::Greater => break, - } - } - false + + let Ok(ix) = hunks.binary_search_by(|hunk| { + hunk.buffer_range.start.cmp(hunk_anchor, &buffer_snapshot) + }) else { + return false; + }; + let hunk_range = hunks[ix].buffer_range.to_point(&buffer_snapshot); + excerpts.iter().any(|excerpt| { + let excerpt_range = excerpt.range.to_point(&buffer_snapshot); + hunk_range.start >= excerpt_range.start && hunk_range.start <= excerpt_range.end + }) }); - } } - fn add_diff(&mut self, diff: Entity, cx: &mut App) { - let buffer_id = diff.read(cx).buffer_id; - self.diffs.insert(buffer_id, diff); + fn anchor_to_offset(&self, anchor: &Anchor, cx: &App) -> Option { + if anchor.diff_base_anchor().is_some() { + panic!("reference multibuffer cannot yet resolve anchors inside deleted hunks"); + } + let (anchor, snapshot, path_key) = self.anchor_to_buffer_anchor(anchor, cx)?; + // TODO(cole) can maybe make this and expected content call a common function instead + let (text, _, _, regions) = self.expected_content(cx); + + // Locate the first region that contains or is past the putative location of the buffer anchor + let ix = regions.partition_point(|region| { + let excerpt = region + .excerpt + .as_ref() + .expect("should have no buffers in empty reference multibuffer"); + excerpt + .path_key + .cmp(&path_key) + .then_with(|| { + if excerpt.range.end.cmp(&anchor, &snapshot).is_lt() { + Ordering::Less + } else if excerpt.range.start.cmp(&anchor, &snapshot).is_gt() { + Ordering::Greater + } else { + Ordering::Equal + } + }) + .then_with(|| { + if let Some(deleted_hunk_anchor) = 
region.deleted_hunk_anchor { + deleted_hunk_anchor.cmp(&anchor, &snapshot) + } else { + let point = anchor.to_point(&snapshot); + assert_eq!(region.buffer_id, Some(snapshot.remote_id())); + if region.buffer_range.end < point { + Ordering::Less + } else if region.buffer_range.start > point { + Ordering::Greater + } else { + Ordering::Equal + } + } + }) + .is_lt() + }); + + let Some(region) = regions.get(ix) else { + return Some(MultiBufferOffset(text.len())); + }; + + let offset = if region.buffer_id == Some(snapshot.remote_id()) { + let buffer_offset = anchor.to_offset(&snapshot); + let buffer_range = region.buffer_range.to_offset(&snapshot); + assert!(buffer_offset <= buffer_range.end); + let overshoot = buffer_offset.saturating_sub(buffer_range.start); + region.range.start + overshoot + } else { + region.range.start + }; + Some(MultiBufferOffset(offset)) } - fn add_inverted_diff( - &mut self, - diff: Entity, - main_buffer: Entity, + fn anchor_to_buffer_anchor( + &self, + anchor: &Anchor, cx: &App, - ) { - let base_text_buffer_id = diff.read(cx).base_text(cx).remote_id(); - self.inverted_diffs - .insert(base_text_buffer_id, (diff, main_buffer)); + ) -> Option<(text::Anchor, BufferSnapshot, PathKey)> { + let (excerpt, anchor) = match anchor { + Anchor::Min => { + let excerpt = self.excerpts.first()?; + (excerpt, excerpt.range.start) + } + Anchor::Excerpt(excerpt_anchor) => ( + self.excerpts.iter().find(|excerpt| { + excerpt.buffer.read(cx).remote_id() == excerpt_anchor.buffer_id() + })?, + excerpt_anchor.text_anchor, + ), + Anchor::Max => { + let excerpt = self.excerpts.last()?; + (excerpt, excerpt.range.end) + } + }; + + Some(( + anchor, + excerpt.buffer.read(cx).snapshot(), + excerpt.path_key.clone(), + )) } } @@ -2917,7 +3542,7 @@ async fn test_random_set_ranges(cx: &mut TestAppContext, mut rng: StdRng) { .collect::>(); ranges.sort_by_key(|range| range.start); log::info!("Setting ranges: {:?}", row_ranges(&ranges)); - let (created, _) = multibuffer.update(cx, 
|multibuffer, cx| { + multibuffer.update(cx, |multibuffer, cx| { multibuffer.set_excerpts_for_path( PathKey::for_buffer(&buf, cx), buf.clone(), @@ -2927,15 +3552,16 @@ async fn test_random_set_ranges(cx: &mut TestAppContext, mut rng: StdRng) { ) }); - assert_eq!(created.len(), ranges.len()); - let snapshot = multibuffer.update(cx, |multibuffer, cx| multibuffer.snapshot(cx)); let mut last_end = None; let mut seen_ranges = Vec::default(); - for (_, buf, range) in snapshot.excerpts() { - let start = range.context.start.to_point(buf); - let end = range.context.end.to_point(buf); + for info in snapshot.excerpts() { + let buffer_snapshot = snapshot + .buffer_for_id(info.context.start.buffer_id) + .unwrap(); + let start = info.context.start.to_point(buffer_snapshot); + let end = info.context.end.to_point(buffer_snapshot); seen_ranges.push(start..end); if let Some(last_end) = last_end.take() { @@ -2987,23 +3613,32 @@ async fn test_random_multibuffer(cx: &mut TestAppContext, mut rng: StdRng) { }); cx.update(|cx| reference.diffs_updated(cx)); } - 15..=19 if !reference.excerpts.is_empty() => { + 15..=24 if !reference.excerpts.is_empty() => { multibuffer.update(cx, |multibuffer, cx| { - let ids = multibuffer.excerpt_ids(); + let snapshot = multibuffer.snapshot(cx); + let infos = snapshot.excerpts().collect::>(); let mut excerpts = HashSet::default(); - for _ in 0..rng.random_range(0..ids.len()) { - excerpts.extend(ids.choose(&mut rng).copied()); + for _ in 0..rng.random_range(0..infos.len()) { + excerpts.extend(infos.choose(&mut rng).cloned()); } let line_count = rng.random_range(0..5); let excerpt_ixs = excerpts .iter() - .map(|id| reference.excerpts.iter().position(|e| e.id == *id).unwrap()) + .map(|info| { + reference + .excerpts + .iter() + .position(|e| e.range == info.context) + .unwrap() + }) .collect::>(); log::info!("Expanding excerpts {excerpt_ixs:?} by {line_count} lines"); multibuffer.expand_excerpts( - excerpts.iter().cloned(), + excerpts + .iter() + .map(|info| 
snapshot.anchor_in_excerpt(info.context.end).unwrap()), line_count, ExpandExcerptDirection::UpAndDown, cx, @@ -3012,25 +3647,7 @@ async fn test_random_multibuffer(cx: &mut TestAppContext, mut rng: StdRng) { reference.expand_excerpts(&excerpts, line_count, cx); }); } - 20..=29 if !reference.excerpts.is_empty() => { - let mut ids_to_remove = vec![]; - for _ in 0..rng.random_range(1..=3) { - let Some(excerpt) = reference.excerpts.choose(&mut rng) else { - break; - }; - let id = excerpt.id; - cx.update(|cx| reference.remove_excerpt(id, cx)); - ids_to_remove.push(id); - } - let snapshot = - multibuffer.read_with(cx, |multibuffer, cx| multibuffer.snapshot(cx)); - ids_to_remove.sort_unstable_by(|a, b| a.cmp(b, &snapshot)); - drop(snapshot); - multibuffer.update(cx, |multibuffer, cx| { - multibuffer.remove_excerpts(ids_to_remove, cx) - }); - } - 30..=39 if !reference.excerpts.is_empty() => { + 25..=34 if !reference.excerpts.is_empty() => { let multibuffer = multibuffer.read_with(cx, |multibuffer, cx| multibuffer.snapshot(cx)); let offset = multibuffer.clip_offset( @@ -3046,32 +3663,7 @@ async fn test_random_multibuffer(cx: &mut TestAppContext, mut rng: StdRng) { anchors.push(multibuffer.anchor_at(offset, bias)); anchors.sort_by(|a, b| a.cmp(b, &multibuffer)); } - 40..=44 if !anchors.is_empty() => { - let multibuffer = - multibuffer.read_with(cx, |multibuffer, cx| multibuffer.snapshot(cx)); - let prev_len = anchors.len(); - anchors = multibuffer - .refresh_anchors(&anchors) - .into_iter() - .map(|a| a.1) - .collect(); - - // Ensure the newly-refreshed anchors point to a valid excerpt and don't - // overshoot its boundaries. 
- assert_eq!(anchors.len(), prev_len); - for anchor in &anchors { - if anchor.excerpt_id == ExcerptId::min() - || anchor.excerpt_id == ExcerptId::max() - { - continue; - } - - let excerpt = multibuffer.excerpt(anchor.excerpt_id).unwrap(); - assert_eq!(excerpt.id, anchor.excerpt_id); - assert!(excerpt.contains(anchor)); - } - } - 45..=55 if !reference.excerpts.is_empty() => { + 35..=45 if !reference.excerpts.is_empty() => { multibuffer.update(cx, |multibuffer, cx| { let snapshot = multibuffer.snapshot(cx); let excerpt_ix = rng.random_range(0..reference.excerpts.len()); @@ -3085,20 +3677,19 @@ async fn test_random_multibuffer(cx: &mut TestAppContext, mut rng: StdRng) { let start = excerpt.range.start; let end = excerpt.range.end; - let range = snapshot.anchor_in_excerpt(excerpt.id, start).unwrap() - ..snapshot.anchor_in_excerpt(excerpt.id, end).unwrap(); + let range = snapshot.anchor_in_excerpt(start).unwrap() + ..snapshot.anchor_in_excerpt(end).unwrap(); log::info!( - "expanding diff hunks in range {:?} (excerpt id {:?}, index {excerpt_ix:?}, buffer id {:?})", - range.to_offset(&snapshot), - excerpt.id, + "expanding diff hunks in range {:?} (excerpt index {excerpt_ix:?}, buffer id {:?})", + range.to_point(&snapshot), buffer_id, ); - reference.expand_diff_hunks(excerpt.id, start..end, cx); + reference.expand_diff_hunks(excerpt.path_key.clone(), start..end, cx); multibuffer.expand_diff_hunks(vec![range], cx); }); } - 56..=85 if needs_diff_calculation => { + 46..=75 if needs_diff_calculation => { multibuffer.update(cx, |multibuffer, cx| { for buffer in multibuffer.all_buffers() { let snapshot = buffer.read(cx).snapshot(); @@ -3129,13 +3720,6 @@ async fn test_random_multibuffer(cx: &mut TestAppContext, mut rng: StdRng) { // Decide if we're creating a new buffer or reusing an existing one let create_new_buffer = buffers.is_empty() || rng.random_bool(0.4); - let prev_excerpt_ix = rng.random_range(0..=reference.excerpts.len()); - let prev_excerpt_id = reference - .excerpts 
- .get(prev_excerpt_ix) - .map_or(ExcerptId::max(), |e| e.id); - let excerpt_ix = (prev_excerpt_ix + 1).min(reference.excerpts.len()); - let (excerpt_buffer, diff, inverted_main_buffer) = if create_new_buffer { let create_inverted = rng.random_bool(0.3); @@ -3213,43 +3797,45 @@ async fn test_random_multibuffer(cx: &mut TestAppContext, mut rng: StdRng) { } }; - let (range, anchor_range) = excerpt_buffer.read_with(cx, |buffer, _| { - let end_row = rng.random_range(0..=buffer.max_point().row); - let start_row = rng.random_range(0..=end_row); - let end_ix = buffer.point_to_offset(Point::new(end_row, 0)); - let start_ix = buffer.point_to_offset(Point::new(start_row, 0)); - let anchor_range = buffer.anchor_before(start_ix)..buffer.anchor_after(end_ix); - - log::info!( - "Inserting excerpt at {} of {} for buffer {}: {:?}[{:?}] = {:?}", - excerpt_ix, - reference.excerpts.len(), - buffer.remote_id(), - buffer.text(), - start_ix..end_ix, - &buffer.text()[start_ix..end_ix] - ); - - (start_ix..end_ix, anchor_range) + let excerpt_buffer_snapshot = + excerpt_buffer.read_with(cx, |excerpt_buffer, _| excerpt_buffer.snapshot()); + let mut ranges = reference + .excerpts + .iter() + .filter(|excerpt| excerpt.buffer == excerpt_buffer) + .map(|excerpt| excerpt.range.to_point(&excerpt_buffer_snapshot)) + .collect::>(); + mutate_excerpt_ranges(&mut rng, &mut ranges, &excerpt_buffer_snapshot, 1); + let ranges = ranges + .iter() + .cloned() + .map(ExcerptRange::new) + .collect::>(); + let path = cx.update(|cx| PathKey::for_buffer(&excerpt_buffer, cx)); + let path_key_index = multibuffer.update(cx, |multibuffer, _| { + multibuffer.get_or_create_path_key_index(&path) }); - let excerpt_id = multibuffer.update(cx, |multibuffer, cx| { - multibuffer - .insert_excerpts_after( - prev_excerpt_id, - excerpt_buffer.clone(), - [ExcerptRange::new(range.clone())], - cx, - ) - .pop() - .unwrap() + multibuffer.update(cx, |multibuffer, cx| { + multibuffer.set_excerpt_ranges_for_path( + path.clone(), + 
excerpt_buffer.clone(), + &excerpt_buffer_snapshot, + ranges.clone(), + cx, + ) }); - reference.insert_excerpt_after( - prev_excerpt_id, - excerpt_id, - (excerpt_buffer.clone(), anchor_range), - ); + cx.update(|cx| { + reference.set_excerpts( + path, + path_key_index, + excerpt_buffer.clone(), + &excerpt_buffer_snapshot, + ranges, + cx, + ) + }); let excerpt_buffer_id = excerpt_buffer.read_with(cx, |buffer, _| buffer.remote_id()); @@ -3283,6 +3869,39 @@ async fn test_random_multibuffer(cx: &mut TestAppContext, mut rng: StdRng) { } } +fn mutate_excerpt_ranges( + rng: &mut StdRng, + existing_ranges: &mut Vec>, + buffer: &BufferSnapshot, + operations: u32, +) { + let mut ranges_to_add = Vec::new(); + + for _ in 0..operations { + match rng.random_range(0..5) { + 0..=1 if !existing_ranges.is_empty() => { + let index = rng.random_range(0..existing_ranges.len()); + log::info!("Removing excerpt at index {index}"); + existing_ranges.remove(index); + } + _ => { + let end_row = rng.random_range(0..=buffer.max_point().row); + let start_row = rng.random_range(0..=end_row); + let end_col = buffer.line_len(end_row); + log::info!( + "Inserting excerpt for buffer {:?}, row range {:?}", + buffer.remote_id(), + start_row..end_row + ); + ranges_to_add.push(Point::new(start_row, 0)..Point::new(end_row, end_col)); + } + } + } + + existing_ranges.extend(ranges_to_add); + existing_ranges.sort_by(|l, r| l.start.cmp(&r.start)); +} + fn check_multibuffer( multibuffer: &MultiBuffer, reference: &ReferenceMultibuffer, @@ -3298,8 +3917,36 @@ fn check_multibuffer( .collect::>(); let actual_row_infos = snapshot.row_infos(MultiBufferRow(0)).collect::>(); - let (expected_text, expected_row_infos, expected_boundary_rows) = + let anchors_to_check = anchors + .iter() + .filter_map(|anchor| { + snapshot + .anchor_to_buffer_anchor(*anchor) + .map(|(anchor, _)| anchor) + }) + // Intentionally mix in some anchors that are (in general) not contained in any excerpt + .chain( + reference + .excerpts + .iter() 
+ .map(|excerpt| excerpt.buffer.read(cx).remote_id()) + .dedup() + .flat_map(|buffer_id| { + [ + text::Anchor::min_for_buffer(buffer_id), + text::Anchor::max_for_buffer(buffer_id), + ] + }), + ) + .map(|anchor| snapshot.anchor_in_buffer(anchor).unwrap()) + .collect::>(); + + let (expected_text, expected_row_infos, expected_boundary_rows, _) = reference.expected_content(cx); + let expected_anchor_offsets = anchors_to_check + .iter() + .map(|anchor| reference.anchor_to_offset(anchor, cx).unwrap()) + .collect::>(); let has_diff = actual_row_infos .iter() @@ -3364,24 +4011,15 @@ fn check_multibuffer( .unwrap() + 1 ); - let reference_ranges = reference - .excerpts - .iter() - .map(|excerpt| { - ( - excerpt.id, - excerpt.range.to_offset(&excerpt.buffer.read(cx).snapshot()), - ) - }) - .collect::>(); for i in 0..snapshot.len().0 { - let excerpt = snapshot + let (_, excerpt_range) = snapshot .excerpt_containing(MultiBufferOffset(i)..MultiBufferOffset(i)) .unwrap(); - assert_eq!( - excerpt.buffer_range().start.0..excerpt.buffer_range().end.0, - reference_ranges[&excerpt.id()] - ); + reference + .excerpts + .iter() + .find(|reference_excerpt| reference_excerpt.range == excerpt_range.context) + .expect("corresponding excerpt should exist in reference multibuffer"); } assert_consistent_line_numbers(&snapshot); @@ -3436,6 +4074,15 @@ fn check_multibuffer( ); } + let actual_anchor_offsets = anchors_to_check + .into_iter() + .map(|anchor| anchor.to_offset(&snapshot)) + .collect::>(); + assert_eq!( + actual_anchor_offsets, expected_anchor_offsets, + "buffer anchor resolves to wrong offset" + ); + for _ in 0..10 { let end_ix = text_rope.clip_offset(rng.random_range(0..=text_rope.len()), Bias::Right); assert_eq!( @@ -3560,8 +4207,8 @@ fn test_history(cx: &mut App) { assert_eq!( multibuffer.edited_ranges_for_transaction(transaction_1, cx), &[ - Point::new(0, 0)..Point::new(0, 2), - Point::new(1, 0)..Point::new(1, 2) + MultiBufferOffset(0)..MultiBufferOffset(2), + 
MultiBufferOffset(7)..MultiBufferOffset(9), ] ); @@ -3777,7 +4424,6 @@ async fn test_summaries_for_anchors(cx: &mut TestAppContext) { }); cx.run_until_parked(); - let mut ids = vec![]; let multibuffer = cx.new(|cx| { let mut multibuffer = MultiBuffer::new(Capability::ReadWrite); multibuffer.set_all_diff_hunks_expanded(cx); @@ -3797,7 +4443,6 @@ async fn test_summaries_for_anchors(cx: &mut TestAppContext) { ); multibuffer.add_diff(diff_1.clone(), cx); multibuffer.add_diff(diff_2.clone(), cx); - ids = multibuffer.excerpt_ids(); multibuffer }); @@ -3821,11 +4466,21 @@ async fn test_summaries_for_anchors(cx: &mut TestAppContext) { ), ); - let anchor_1 = Anchor::in_buffer(ids[0], text::Anchor::MIN); + let anchor_1 = multibuffer.read_with(cx, |multibuffer, cx| { + multibuffer + .snapshot(cx) + .anchor_in_excerpt(text::Anchor::min_for_buffer(buffer_1.read(cx).remote_id())) + .unwrap() + }); let point_1 = snapshot.summaries_for_anchors::([&anchor_1])[0]; assert_eq!(point_1, Point::new(0, 0)); - let anchor_2 = Anchor::in_buffer(ids[1], text::Anchor::MIN); + let anchor_2 = multibuffer.read_with(cx, |multibuffer, cx| { + multibuffer + .snapshot(cx) + .anchor_in_excerpt(text::Anchor::min_for_buffer(buffer_2.read(cx).remote_id())) + .unwrap() + }); let point_2 = snapshot.summaries_for_anchors::([&anchor_2])[0]; assert_eq!(point_2, Point::new(3, 0)); } @@ -3851,7 +4506,7 @@ async fn test_trailing_deletion_without_newline(cx: &mut TestAppContext) { cx, ); multibuffer.add_diff(diff_1.clone(), cx); - multibuffer.expand_diff_hunks(vec![Anchor::min()..Anchor::max()], cx); + multibuffer.expand_diff_hunks(vec![Anchor::Min..Anchor::Max], cx); multibuffer }); @@ -3884,7 +4539,7 @@ async fn test_trailing_deletion_without_newline(cx: &mut TestAppContext) { let (_, translated_offset) = snapshot.point_to_buffer_offset(Point::new(2, 0)).unwrap(); assert_eq!(translated_offset.0, "one\n".len()); - let (_, translated_point, _) = snapshot.point_to_buffer_point(Point::new(2, 0)).unwrap(); + let 
(_, translated_point) = snapshot.point_to_buffer_point(Point::new(2, 0)).unwrap(); assert_eq!(translated_point, Point::new(1, 0)); // The same, for an excerpt that's not at the end of the multibuffer. @@ -3927,7 +4582,7 @@ async fn test_trailing_deletion_without_newline(cx: &mut TestAppContext) { let (buffer, translated_offset) = snapshot.point_to_buffer_offset(Point::new(2, 0)).unwrap(); assert_eq!(buffer.remote_id(), buffer_1_id); assert_eq!(translated_offset.0, "one\n".len()); - let (buffer, translated_point, _) = snapshot.point_to_buffer_point(Point::new(2, 0)).unwrap(); + let (buffer, translated_point) = snapshot.point_to_buffer_point(Point::new(2, 0)).unwrap(); assert_eq!(buffer.remote_id(), buffer_1_id); assert_eq!(translated_point, Point::new(1, 0)); } @@ -3967,6 +4622,7 @@ fn format_diff( }; let expand = info .expand_info + .as_ref() .map(|expand_info| match expand_info.direction { ExpandExcerptDirection::Up => " [↑]", ExpandExcerptDirection::Down => " [↓]", @@ -4310,9 +4966,15 @@ fn assert_excerpts_match( ) { let mut output = String::new(); multibuffer.read_with(cx, |multibuffer, cx| { - for (_, buffer, range) in multibuffer.snapshot(cx).excerpts() { + let snapshot = multibuffer.snapshot(cx); + for excerpt in multibuffer.snapshot(cx).excerpts() { output.push_str("-----\n"); - output.extend(buffer.text_for_range(range.context)); + output.extend( + snapshot + .buffer_for_id(excerpt.context.start.buffer_id) + .unwrap() + .text_for_range(excerpt.context), + ); if !output.ends_with('\n') { output.push('\n'); } @@ -4525,14 +5187,14 @@ fn assert_position_translation(snapshot: &MultiBufferSnapshot) { if let Some((buffer, offset)) = snapshot.point_to_buffer_offset(snapshot.max_point()) { assert!(offset.0 <= buffer.len()); } - if let Some((buffer, point, _)) = snapshot.point_to_buffer_point(snapshot.max_point()) { + if let Some((buffer, point)) = snapshot.point_to_buffer_point(snapshot.max_point()) { assert!(point <= buffer.max_point()); } } fn 
assert_line_indents(snapshot: &MultiBufferSnapshot) { let max_row = snapshot.max_point().row; - let buffer_id = snapshot.excerpts().next().unwrap().1.remote_id(); + let buffer_id = snapshot.excerpts().next().unwrap().context.start.buffer_id; let text = text::Buffer::new(ReplicaId::LOCAL, buffer_id, snapshot.text()); let mut line_indents = text .line_indents_in_row_range(0..max_row + 1) @@ -4720,7 +5382,8 @@ fn test_random_chunk_bitmaps_with_diffs(cx: &mut App, mut rng: StdRng) { let mut diffs = Vec::new(); multibuffer.update(cx, |multibuffer, cx| { - for buffer_id in multibuffer.excerpt_buffer_ids() { + let snapshot = multibuffer.snapshot(cx); + for buffer_id in snapshot.all_buffer_ids() { if rng.random_bool(0.7) { if let Some(buffer_handle) = multibuffer.buffer(buffer_id) { let buffer_text = buffer_handle.read(cx).text(); @@ -4881,7 +5544,7 @@ fn collect_word_diffs( }); multibuffer.update(cx, |multibuffer, cx| { - multibuffer.expand_diff_hunks(vec![Anchor::min()..Anchor::max()], cx); + multibuffer.expand_diff_hunks(vec![Anchor::Min..Anchor::Max], cx); }); let snapshot = multibuffer.read_with(cx, |multibuffer, cx| multibuffer.snapshot(cx)); @@ -4996,38 +5659,40 @@ fn test_excerpts_containment_functions(cx: &mut App) { let multibuffer = cx.new(|_| MultiBuffer::new(Capability::ReadWrite)); - let (excerpt_1_id, excerpt_2_id, excerpt_3_id) = multibuffer.update(cx, |multibuffer, cx| { - multibuffer.set_excerpts_for_path( - PathKey::sorted(0), - buffer_1.clone(), - [Point::new(0, 0)..Point::new(1, 3)], - 0, - cx, - ); + let (excerpt_1_info, excerpt_2_info, excerpt_3_info) = + multibuffer.update(cx, |multibuffer, cx| { + multibuffer.set_excerpts_for_path( + PathKey::sorted(0), + buffer_1.clone(), + [Point::new(0, 0)..Point::new(1, 3)], + 0, + cx, + ); - multibuffer.set_excerpts_for_path( - PathKey::sorted(1), - buffer_2.clone(), - [Point::new(0, 0)..Point::new(1, 3)], - 0, - cx, - ); + multibuffer.set_excerpts_for_path( + PathKey::sorted(1), + buffer_2.clone(), + 
[Point::new(0, 0)..Point::new(1, 3)], + 0, + cx, + ); - multibuffer.set_excerpts_for_path( - PathKey::sorted(2), - buffer_3.clone(), - [Point::new(0, 0)..Point::new(0, 3)], - 0, - cx, - ); + multibuffer.set_excerpts_for_path( + PathKey::sorted(2), + buffer_3.clone(), + [Point::new(0, 0)..Point::new(0, 3)], + 0, + cx, + ); - let mut ids = multibuffer.excerpt_ids().into_iter(); - ( - ids.next().unwrap(), - ids.next().unwrap(), - ids.next().unwrap(), - ) - }); + let snapshot = multibuffer.snapshot(cx); + let mut excerpts = snapshot.excerpts(); + ( + excerpts.next().unwrap(), + excerpts.next().unwrap(), + excerpts.next().unwrap(), + ) + }); let snapshot = multibuffer.read(cx).snapshot(cx); @@ -5045,24 +5710,24 @@ fn test_excerpts_containment_functions(cx: &mut App) { let excerpts: Vec<_> = snapshot.excerpts_for_range(p00..p00).collect(); assert_eq!(excerpts.len(), 1); - assert_eq!(excerpts[0].id, excerpt_1_id); + assert_eq!(excerpts[0].range, excerpt_1_info); // Cursor at very end of excerpt 3 let excerpts: Vec<_> = snapshot.excerpts_for_range(p43..p43).collect(); assert_eq!(excerpts.len(), 1); - assert_eq!(excerpts[0].id, excerpt_3_id); + assert_eq!(excerpts[0].range, excerpt_3_info); let excerpts: Vec<_> = snapshot.excerpts_for_range(p00..p23).collect(); assert_eq!(excerpts.len(), 2); - assert_eq!(excerpts[0].id, excerpt_1_id); - assert_eq!(excerpts[1].id, excerpt_2_id); + assert_eq!(excerpts[0].range, excerpt_1_info); + assert_eq!(excerpts[1].range, excerpt_2_info); // This range represent an selection with end-point just inside excerpt_2 // Today we only expand the first excerpt, but another interpretation that // we could consider is expanding both here let excerpts: Vec<_> = snapshot.excerpts_for_range(p10..p20).collect(); assert_eq!(excerpts.len(), 1); - assert_eq!(excerpts[0].id, excerpt_1_id); + assert_eq!(excerpts[0].range, excerpt_1_info); //// Test that `excerpts_for_range` and `excerpt_containing` agree for all single offsets (cursor positions) for offset 
in 0..=snapshot.len().0 { @@ -5074,15 +5739,15 @@ fn test_excerpts_containment_functions(cx: &mut App) { "Expected exactly one excerpt for offset {offset}", ); - let excerpt_containing = snapshot.excerpt_containing(offset..offset); - assert!( - excerpt_containing.is_some(), - "Expected excerpt_containing to find excerpt for offset {offset}", - ); + let (_, excerpt_containing) = + snapshot + .excerpt_containing(offset..offset) + .unwrap_or_else(|| { + panic!("Expected excerpt_containing to find excerpt for offset {offset}") + }); assert_eq!( - excerpts_for_range[0].id, - excerpt_containing.unwrap().id(), + excerpts_for_range[0].range, excerpt_containing, "excerpts_for_range and excerpt_containing should agree for offset {offset}", ); } @@ -5090,9 +5755,8 @@ fn test_excerpts_containment_functions(cx: &mut App) { //// Test `excerpt_containing` behavior with ranges: // Ranges intersecting a single-excerpt - let containing = snapshot.excerpt_containing(p00..p13); - assert!(containing.is_some()); - assert_eq!(containing.unwrap().id(), excerpt_1_id); + let (_, containing) = snapshot.excerpt_containing(p00..p13).unwrap(); + assert_eq!(containing, excerpt_1_info); // Ranges intersecting multiple excerpts (should return None) let containing = snapshot.excerpt_containing(p20..p40); @@ -5103,14 +5767,12 @@ fn test_excerpts_containment_functions(cx: &mut App) { } #[gpui::test] -fn test_range_to_buffer_ranges_with_range_bounds(cx: &mut App) { - use std::ops::Bound; - +fn test_range_to_buffer_ranges(cx: &mut App) { let buffer_1 = cx.new(|cx| Buffer::local("aaa\nbbb", cx)); let buffer_2 = cx.new(|cx| Buffer::local("ccc", cx)); let multibuffer = cx.new(|_| MultiBuffer::new(Capability::ReadWrite)); - let (excerpt_1_id, excerpt_2_id) = multibuffer.update(cx, |multibuffer, cx| { + multibuffer.update(cx, |multibuffer, cx| { multibuffer.set_excerpts_for_path( PathKey::sorted(0), buffer_1.clone(), @@ -5126,10 +5788,6 @@ fn test_range_to_buffer_ranges_with_range_bounds(cx: &mut App) { 0, 
cx, ); - - let excerpt_ids = multibuffer.excerpt_ids(); - - (excerpt_ids[0], excerpt_ids[1]) }); let snapshot = multibuffer.read(cx).snapshot(cx); @@ -5143,41 +5801,15 @@ fn test_range_to_buffer_ranges_with_range_bounds(cx: &mut App) { 1, "Half-open range ending at excerpt start should EXCLUDE that excerpt" ); - assert_eq!(ranges_half_open[0].2, excerpt_1_id); - - let ranges_inclusive = snapshot.range_to_buffer_ranges(Point::zero()..=excerpt_2_start); - assert_eq!( - ranges_inclusive.len(), - 2, - "Inclusive range ending at excerpt start should INCLUDE that excerpt" - ); - assert_eq!(ranges_inclusive[0].2, excerpt_1_id); - assert_eq!(ranges_inclusive[1].2, excerpt_2_id); - - let ranges_unbounded = - snapshot.range_to_buffer_ranges((Bound::Included(Point::zero()), Bound::Unbounded)); - assert_eq!( - ranges_unbounded.len(), - 2, - "Unbounded end should include all excerpts" - ); - assert_eq!(ranges_unbounded[0].2, excerpt_1_id); - assert_eq!(ranges_unbounded[1].2, excerpt_2_id); - - let ranges_excluded_end = snapshot.range_to_buffer_ranges(( - Bound::Included(Point::zero()), - Bound::Excluded(excerpt_2_start), - )); + assert_eq!(ranges_half_open[0].1, BufferOffset(0)..BufferOffset(7)); assert_eq!( - ranges_excluded_end.len(), - 1, - "Excluded end bound should exclude excerpt starting at that point" + ranges_half_open[0].0.remote_id(), + buffer_1.read(cx).remote_id() ); - assert_eq!(ranges_excluded_end[0].2, excerpt_1_id); let buffer_empty = cx.new(|cx| Buffer::local("", cx)); let multibuffer_trailing_empty = cx.new(|_| MultiBuffer::new(Capability::ReadWrite)); - let (te_excerpt_1_id, te_excerpt_2_id) = + let (_te_excerpt_1_info, _te_excerpt_2_info) = multibuffer_trailing_empty.update(cx, |multibuffer, cx| { multibuffer.set_excerpts_for_path( PathKey::sorted(0), @@ -5195,8 +5827,9 @@ fn test_range_to_buffer_ranges_with_range_bounds(cx: &mut App) { cx, ); - let excerpt_ids = multibuffer.excerpt_ids(); - (excerpt_ids[0], excerpt_ids[1]) + let snapshot = 
multibuffer.snapshot(cx); + let mut infos = snapshot.excerpts(); + (infos.next().unwrap(), infos.next().unwrap()) }); let snapshot_trailing = multibuffer_trailing_empty.read(cx).snapshot(cx); @@ -5207,29 +5840,130 @@ fn test_range_to_buffer_ranges_with_range_bounds(cx: &mut App) { let ranges_half_open_max = snapshot_trailing.range_to_buffer_ranges(Point::zero()..max_point); assert_eq!( ranges_half_open_max.len(), - 1, - "Half-open range to max_point should EXCLUDE trailing empty excerpt at max_point" + 2, + "Should include trailing empty excerpts" + ); + assert_eq!(ranges_half_open_max[1].1, BufferOffset(0)..BufferOffset(0)); +} + +#[gpui::test] +async fn test_buffer_range_to_excerpt_ranges(cx: &mut TestAppContext) { + let base_text = indoc!( + " + aaa + bbb + ccc + ddd + eee + ppp + qqq + rrr + fff + ggg + hhh + " + ); + let text = indoc!( + " + aaa + BBB + ddd + eee + ppp + qqq + rrr + FFF + ggg + hhh + " + ); + + let buffer = cx.new(|cx| Buffer::local(text, cx)); + let diff = cx + .new(|cx| BufferDiff::new_with_base_text(base_text, &buffer.read(cx).text_snapshot(), cx)); + cx.run_until_parked(); + + let multibuffer = cx.new(|cx| { + let mut multibuffer = MultiBuffer::new(Capability::ReadWrite); + multibuffer.set_excerpts_for_path( + PathKey::sorted(0), + buffer.clone(), + [ + Point::new(0, 0)..Point::new(3, 3), + Point::new(7, 0)..Point::new(9, 3), + ], + 0, + cx, + ); + multibuffer.add_diff(diff.clone(), cx); + multibuffer + }); + + multibuffer.update(cx, |multibuffer, cx| { + multibuffer.expand_diff_hunks(vec![Anchor::Min..Anchor::Max], cx); + }); + cx.run_until_parked(); + + let snapshot = multibuffer.read_with(cx, |multibuffer, cx| multibuffer.snapshot(cx)); + + let actual_diff = format_diff( + &snapshot.text(), + &snapshot.row_infos(MultiBufferRow(0)).collect::>(), + &Default::default(), + None, + ); + let expected_diff = indoc!( + " + aaa + - bbb + - ccc + + BBB + ddd + eee [\u{2193}] + - fff [\u{2191}] + + FFF + ggg + hhh [\u{2193}]" + ); + 
pretty_assertions::assert_eq!(actual_diff, expected_diff); + + let buffer_snapshot = buffer.read_with(cx, |buffer, _| buffer.snapshot()); + + let query_spanning_deleted_hunk = buffer_snapshot.anchor_after(Point::new(0, 0)) + ..buffer_snapshot.anchor_before(Point::new(1, 3)); + assert_eq!( + snapshot + .buffer_range_to_excerpt_ranges(query_spanning_deleted_hunk) + .map(|range| range.to_point(&snapshot)) + .collect::>(), + vec![ + Point::new(0, 0)..Point::new(1, 0), + Point::new(3, 0)..Point::new(3, 3), + ], ); - assert_eq!(ranges_half_open_max[0].2, te_excerpt_1_id); - let ranges_inclusive_max = snapshot_trailing.range_to_buffer_ranges(Point::zero()..=max_point); + let query_within_contiguous_main_buffer = buffer_snapshot.anchor_after(Point::new(1, 0)) + ..buffer_snapshot.anchor_before(Point::new(2, 3)); assert_eq!( - ranges_inclusive_max.len(), - 2, - "Inclusive range to max_point should INCLUDE trailing empty excerpt" + snapshot + .buffer_range_to_excerpt_ranges(query_within_contiguous_main_buffer) + .map(|range| range.to_point(&snapshot)) + .collect::>(), + vec![Point::new(3, 0)..Point::new(4, 3)], ); - assert_eq!(ranges_inclusive_max[0].2, te_excerpt_1_id); - assert_eq!(ranges_inclusive_max[1].2, te_excerpt_2_id); - let ranges_unbounded_trailing = snapshot_trailing - .range_to_buffer_ranges((Bound::Included(Point::zero()), Bound::Unbounded)); + let query_spanning_both_excerpts = buffer_snapshot.anchor_after(Point::new(2, 0)) + ..buffer_snapshot.anchor_before(Point::new(8, 3)); assert_eq!( - ranges_unbounded_trailing.len(), - 2, - "Unbounded end should include trailing empty excerpt" + snapshot + .buffer_range_to_excerpt_ranges(query_spanning_both_excerpts) + .map(|range| range.to_point(&snapshot)) + .collect::>(), + vec![ + Point::new(4, 0)..Point::new(5, 3), + Point::new(7, 0)..Point::new(8, 3), + ], ); - assert_eq!(ranges_unbounded_trailing[0].2, te_excerpt_1_id); - assert_eq!(ranges_unbounded_trailing[1].2, te_excerpt_2_id); } #[gpui::test] @@ -5275,17 
+6009,14 @@ fn test_cannot_seek_backward_after_excerpt_replacement(cx: &mut TestAppContext) let (anchor_in_e_b2, anchor_in_e_b3) = multibuffer.read_with(cx, |multibuffer, cx| { let snapshot = multibuffer.snapshot(cx); - let excerpt_ids: Vec = snapshot.excerpts().map(|(id, _, _)| id).collect(); - assert_eq!(excerpt_ids.len(), 4, "expected 4 excerpts (3×B + 1×C)"); + let excerpt_infos = snapshot.excerpts().collect::>(); + assert_eq!(excerpt_infos.len(), 4, "expected 4 excerpts (3×B + 1×C)"); - let e_b2_id = excerpt_ids[1]; - let e_b3_id = excerpt_ids[2]; + let e_b2_info = excerpt_infos[1].clone(); + let e_b3_info = excerpt_infos[2].clone(); - let e_b2 = snapshot.excerpt(e_b2_id).expect("E_B2 should exist"); - let e_b3 = snapshot.excerpt(e_b3_id).expect("E_B3 should exist"); - - let anchor_b2 = Anchor::in_buffer(e_b2_id, e_b2.range.context.start); - let anchor_b3 = Anchor::in_buffer(e_b3_id, e_b3.range.context.start); + let anchor_b2 = snapshot.anchor_in_excerpt(e_b2_info.context.start).unwrap(); + let anchor_b3 = snapshot.anchor_in_excerpt(e_b3_info.context.start).unwrap(); (anchor_b2, anchor_b3) }); @@ -5314,3 +6045,104 @@ fn test_cannot_seek_backward_after_excerpt_replacement(cx: &mut TestAppContext) snapshot.summaries_for_anchors::(&[anchor_in_e_b2, anchor_in_e_b3]); }); } + +#[gpui::test] +fn test_resolving_max_anchor_for_buffer(cx: &mut TestAppContext) { + let dock_base_text = indoc! {" + 0 + 1 + 2 + 3 + 4 + 5 + 6 + 7 + 8 + 9 + 10 + 11 + 12 + "}; + + let dock_text = indoc! 
{" + 0 + 4 + 5 + 6 + 10 + 11 + 12 + "}; + + let dock_buffer = cx.new(|cx| Buffer::local(dock_text, cx)); + let diff = cx.new(|cx| { + BufferDiff::new_with_base_text(dock_base_text, &dock_buffer.read(cx).snapshot(), cx) + }); + + let workspace_text = "second buffer\n"; + let workspace_buffer = cx.new(|cx| Buffer::local(workspace_text, cx)); + + let dock_path = PathKey::with_sort_prefix(0, rel_path("").into_arc()); + let workspace_path = PathKey::with_sort_prefix(1, rel_path("").into_arc()); + + let multibuffer = cx.new(|_| MultiBuffer::new(Capability::ReadWrite)); + + multibuffer.update(cx, |multibuffer, cx| { + multibuffer.set_excerpt_ranges_for_path( + dock_path, + dock_buffer.clone(), + &dock_buffer.read(cx).snapshot(), + vec![ + ExcerptRange::new(Point::zero()..Point::new(1, 1)), + ExcerptRange::new(Point::new(3, 0)..Point::new(4, 2)), + ], + cx, + ); + multibuffer.set_excerpt_ranges_for_path( + workspace_path, + workspace_buffer.clone(), + &workspace_buffer.read(cx).snapshot(), + vec![ExcerptRange::new( + Point::zero()..workspace_buffer.read(cx).max_point(), + )], + cx, + ); + multibuffer.add_diff(diff, cx); + multibuffer.set_all_diff_hunks_expanded(cx); + }); + + let snapshot = multibuffer.update(cx, |multibuffer, cx| multibuffer.snapshot(cx)); + let diff = format_diff( + &snapshot.text(), + &snapshot.row_infos(MultiBufferRow(0)).collect::>(), + &Default::default(), + None, + ); + assert_eq!( + diff, + indoc! 
{" + 0 + - 1 + - 2 + - 3 + 4 [↓] + 6 [↑] + - 7 + - 8 + - 9 + 10 [↓] + second buffer + "} + ); + + multibuffer.update(cx, |multibuffer, cx| { + let snapshot = multibuffer.snapshot(cx); + let point = snapshot + .anchor_in_buffer(text::Anchor::max_for_buffer( + dock_buffer.read(cx).remote_id(), + )) + .unwrap() + .to_point(&snapshot); + assert_eq!(point, Point::new(10, 0)); + }) +} diff --git a/crates/multi_buffer/src/path_key.rs b/crates/multi_buffer/src/path_key.rs index 09d17d7b7fe2e9e666ba6c5777216c9c8ba4dea0..5c2123d0f9c1b09c16fd99531973df81c45140f7 100644 --- a/crates/multi_buffer/src/path_key.rs +++ b/crates/multi_buffer/src/path_key.rs @@ -1,24 +1,20 @@ -use std::{mem, ops::Range, sync::Arc}; +use std::{ops::Range, rc::Rc, sync::Arc}; -use collections::HashSet; use gpui::{App, AppContext, Context, Entity}; use itertools::Itertools; use language::{Buffer, BufferSnapshot}; use rope::Point; -use text::{Bias, OffsetRangeExt, locator::Locator}; -use util::{post_inc, rel_path::RelPath}; +use sum_tree::{Dimensions, SumTree}; +use text::{Bias, BufferId, Edit, OffsetRangeExt, Patch}; +use util::rel_path::RelPath; use ztracing::instrument; use crate::{ - Anchor, ExcerptId, ExcerptRange, ExpandExcerptDirection, MultiBuffer, build_excerpt_ranges, + Anchor, BufferState, BufferStateSnapshot, DiffChangeKind, Event, Excerpt, ExcerptOffset, + ExcerptRange, ExcerptSummary, ExpandExcerptDirection, MultiBuffer, MultiBufferOffset, + PathKeyIndex, build_excerpt_ranges, remove_diff_state, }; -#[derive(Debug, Clone)] -pub struct PathExcerptInsertResult { - pub excerpt_ids: Vec, - pub added_new_excerpt: bool, -} - #[derive(PartialEq, Eq, Ord, PartialOrd, Clone, Hash, Debug)] pub struct PathKey { // Used by the derived PartialOrd & Ord @@ -27,6 +23,13 @@ pub struct PathKey { } impl PathKey { + pub fn min() -> Self { + Self { + sort_prefix: None, + path: RelPath::empty().into_arc(), + } + } + pub fn sorted(sort_prefix: u64) -> Self { Self { sort_prefix: Some(sort_prefix), @@ -55,41 
+58,17 @@ impl PathKey { } impl MultiBuffer { - pub fn paths(&self) -> impl Iterator + '_ { - self.excerpts_by_path.keys() - } - - pub fn excerpts_for_path(&self, path: &PathKey) -> impl '_ + Iterator { - self.excerpts_by_path - .get(path) - .map(|excerpts| excerpts.as_slice()) - .unwrap_or_default() - .iter() - .copied() - } - - pub fn path_for_excerpt(&self, excerpt: ExcerptId) -> Option { - self.paths_by_excerpt.get(&excerpt).cloned() - } - - pub fn remove_excerpts_for_path(&mut self, path: PathKey, cx: &mut Context) { - if let Some(to_remove) = self.excerpts_by_path.remove(&path) { - self.remove_excerpts(to_remove, cx) - } - } - pub fn buffer_for_path(&self, path: &PathKey, cx: &App) -> Option> { - let excerpt_id = self.excerpts_by_path.get(path)?.first()?; - let snapshot = self.read(cx); - let excerpt = snapshot.excerpt(*excerpt_id)?; - self.buffer(excerpt.buffer_id) + let snapshot = self.snapshot(cx); + let excerpt = snapshot.excerpts_for_path(path).next()?; + self.buffer(excerpt.context.start.buffer_id) } pub fn location_for_path(&self, path: &PathKey, cx: &App) -> Option { - let excerpt_id = self.excerpts_by_path.get(path)?.first()?; - let snapshot = self.read(cx); - let excerpt = snapshot.excerpt(*excerpt_id)?; - Some(Anchor::in_buffer(excerpt.id, excerpt.range.context.start)) + let snapshot = self.snapshot(cx); + let excerpt = snapshot.excerpts_for_path(path).next()?; + let path_key_index = snapshot.path_key_index_for_buffer(excerpt.context.start.buffer_id)?; + Some(Anchor::in_buffer(path_key_index, excerpt.context.start)) } pub fn set_excerpts_for_buffer( @@ -98,12 +77,14 @@ impl MultiBuffer { ranges: impl IntoIterator>, context_line_count: u32, cx: &mut Context, - ) -> (Vec>, bool) { + ) -> bool { let path = PathKey::for_buffer(&buffer, cx); self.set_excerpts_for_path(path, buffer, ranges, context_line_count, cx) } /// Sets excerpts, returns `true` if at least one new excerpt was added. 
+ /// + /// Any existing excerpts for this buffer or this path will be replaced by the provided ranges. #[instrument(skip_all)] pub fn set_excerpts_for_path( &mut self, @@ -112,20 +93,83 @@ impl MultiBuffer { ranges: impl IntoIterator>, context_line_count: u32, cx: &mut Context, - ) -> (Vec>, bool) { + ) -> bool { let buffer_snapshot = buffer.read(cx).snapshot(); + let ranges: Vec<_> = ranges.into_iter().collect(); let excerpt_ranges = build_excerpt_ranges(ranges, context_line_count, &buffer_snapshot); - let (new, counts) = Self::merge_excerpt_ranges(&excerpt_ranges); - self.set_merged_excerpt_ranges_for_path( - path, - buffer, - excerpt_ranges, + let merged = Self::merge_excerpt_ranges(&excerpt_ranges); + let (inserted, _path_key_index) = + self.set_merged_excerpt_ranges_for_path(path, buffer, &buffer_snapshot, merged, cx); + inserted + } + + /// Like [`Self::set_excerpts_for_path`], but expands the provided ranges to cover any overlapping existing excerpts + /// for the same buffer and path. + /// + /// Existing excerpts that do not overlap any of the provided ranges are discarded. 
+ pub fn update_excerpts_for_path( + &mut self, + path: PathKey, + buffer: Entity, + ranges: impl IntoIterator>, + context_line_count: u32, + cx: &mut Context, + ) -> bool { + let buffer_snapshot = buffer.read(cx).snapshot(); + let ranges: Vec<_> = ranges.into_iter().collect(); + let excerpt_ranges = build_excerpt_ranges(ranges, context_line_count, &buffer_snapshot); + let merged = self.merge_new_with_existing_excerpt_ranges( + &path, &buffer_snapshot, - new, - counts, + excerpt_ranges, cx, - ) + ); + + let (inserted, _path_key_index) = + self.set_merged_excerpt_ranges_for_path(path, buffer, &buffer_snapshot, merged, cx); + inserted + } + + pub fn merge_new_with_existing_excerpt_ranges( + &self, + path: &PathKey, + buffer_snapshot: &BufferSnapshot, + mut excerpt_ranges: Vec>, + cx: &App, + ) -> Vec> { + let multibuffer_snapshot = self.snapshot(cx); + + if multibuffer_snapshot.path_for_buffer(buffer_snapshot.remote_id()) == Some(path) { + excerpt_ranges.sort_by_key(|range| range.context.start); + let mut combined_ranges = Vec::new(); + let mut new_ranges = excerpt_ranges.into_iter().peekable(); + for existing_range in + multibuffer_snapshot.excerpts_for_buffer(buffer_snapshot.remote_id()) + { + let existing_range = ExcerptRange { + context: existing_range.context.to_point(buffer_snapshot), + primary: existing_range.primary.to_point(buffer_snapshot), + }; + while let Some(new_range) = new_ranges.peek() + && new_range.context.end < existing_range.context.start + { + combined_ranges.push(new_range.clone()); + new_ranges.next(); + } + + if let Some(new_range) = new_ranges.peek() + && new_range.context.start <= existing_range.context.end + { + combined_ranges.push(existing_range) + } + } + combined_ranges.extend(new_ranges); + excerpt_ranges = combined_ranges; + } + + excerpt_ranges.sort_by_key(|range| range.context.start); + Self::merge_excerpt_ranges(&excerpt_ranges) } pub fn set_excerpt_ranges_for_path( @@ -135,17 +179,11 @@ impl MultiBuffer { buffer_snapshot: 
&BufferSnapshot, excerpt_ranges: Vec>, cx: &mut Context, - ) -> (Vec>, bool) { - let (new, counts) = Self::merge_excerpt_ranges(&excerpt_ranges); - self.set_merged_excerpt_ranges_for_path( - path, - buffer, - excerpt_ranges, - buffer_snapshot, - new, - counts, - cx, - ) + ) -> bool { + let merged = Self::merge_excerpt_ranges(&excerpt_ranges); + let (inserted, _path_key_index) = + self.set_merged_excerpt_ranges_for_path(path, buffer, buffer_snapshot, merged, cx); + inserted } pub fn set_anchored_excerpts_for_path( @@ -161,350 +199,505 @@ impl MultiBuffer { let mut app = cx.to_async(); async move { let snapshot = buffer_snapshot.clone(); - let (excerpt_ranges, new, counts) = app + let (ranges, merged_excerpt_ranges) = app .background_spawn(async move { - let ranges = ranges.into_iter().map(|range| range.to_point(&snapshot)); + let point_ranges = ranges.iter().map(|range| range.to_point(&snapshot)); let excerpt_ranges = - build_excerpt_ranges(ranges, context_line_count, &snapshot); - let (new, counts) = Self::merge_excerpt_ranges(&excerpt_ranges); - (excerpt_ranges, new, counts) + build_excerpt_ranges(point_ranges, context_line_count, &snapshot); + let merged = Self::merge_excerpt_ranges(&excerpt_ranges); + (ranges, merged) }) .await; multi_buffer .update(&mut app, move |multi_buffer, cx| { - let (ranges, _) = multi_buffer.set_merged_excerpt_ranges_for_path( + let (_, path_key_index) = multi_buffer.set_merged_excerpt_ranges_for_path( path_key, buffer, - excerpt_ranges, &buffer_snapshot, - new, - counts, + merged_excerpt_ranges, cx, ); ranges + .into_iter() + .map(|range| Anchor::range_in_buffer(path_key_index, range)) + .collect() }) .ok() .unwrap_or_default() } } - pub(super) fn expand_excerpts_with_paths( + pub fn expand_excerpts( &mut self, - ids: impl IntoIterator, + anchors: impl IntoIterator, line_count: u32, direction: ExpandExcerptDirection, cx: &mut Context, ) { - let mut sorted_ids: Vec = ids.into_iter().collect(); - sorted_ids.sort_by(|a, b| { - let path_a 
= self.paths_by_excerpt.get(a); - let path_b = self.paths_by_excerpt.get(b); - path_a.cmp(&path_b) - }); - let grouped = sorted_ids - .into_iter() - .chunk_by(|id| self.paths_by_excerpt.get(id).cloned()) + if line_count == 0 { + return; + } + + let snapshot = self.snapshot(cx); + let mut sorted_anchors = anchors .into_iter() - .filter_map(|(k, v)| Some((k?, v.into_iter().collect::>()))) + .filter_map(|anchor| anchor.excerpt_anchor()) .collect::>(); - let snapshot = self.snapshot(cx); - - for (path, ids) in grouped.into_iter() { - let Some(excerpt_ids) = self.excerpts_by_path.get(&path) else { + if sorted_anchors.is_empty() { + return; + } + sorted_anchors.sort_by(|a, b| a.cmp(b, &snapshot)); + let buffers = sorted_anchors.into_iter().chunk_by(|anchor| anchor.path); + let mut cursor = snapshot.excerpts.cursor::(()); + + for (path_index, excerpt_anchors) in &buffers { + let path = snapshot + .path_keys_by_index + .get(&path_index) + .expect("anchor from wrong multibuffer"); + + let mut excerpt_anchors = excerpt_anchors.peekable(); + let mut ranges = Vec::new(); + + cursor.seek_forward(path, Bias::Left); + let Some((buffer, buffer_snapshot)) = cursor + .item() + .map(|excerpt| (excerpt.buffer(&self), excerpt.buffer_snapshot(&snapshot))) + else { continue; }; - let ids_to_expand = HashSet::from_iter(ids); - let mut excerpt_id_ = None; - let expanded_ranges = excerpt_ids.iter().filter_map(|excerpt_id| { - let excerpt = snapshot.excerpt(*excerpt_id)?; - let excerpt_id = excerpt.id; - if excerpt_id_.is_none() { - excerpt_id_ = Some(excerpt_id); + while let Some(excerpt) = cursor.item() + && &excerpt.path_key == path + { + let mut range = ExcerptRange { + context: excerpt.range.context.to_point(buffer_snapshot), + primary: excerpt.range.primary.to_point(buffer_snapshot), + }; + + let mut needs_expand = false; + while excerpt_anchors.peek().is_some_and(|anchor| { + excerpt + .range + .contains(&anchor.text_anchor(), buffer_snapshot) + }) { + needs_expand = true; + 
excerpt_anchors.next(); } - let mut context = excerpt.range.context.to_point(&excerpt.buffer); - if ids_to_expand.contains(&excerpt_id) { + if needs_expand { match direction { ExpandExcerptDirection::Up => { - context.start.row = context.start.row.saturating_sub(line_count); - context.start.column = 0; + range.context.start.row = + range.context.start.row.saturating_sub(line_count); + range.context.start.column = 0; } ExpandExcerptDirection::Down => { - context.end.row = - (context.end.row + line_count).min(excerpt.buffer.max_point().row); - context.end.column = excerpt.buffer.line_len(context.end.row); + range.context.end.row = (range.context.end.row + line_count) + .min(excerpt.buffer_snapshot(&snapshot).max_point().row); + range.context.end.column = excerpt + .buffer_snapshot(&snapshot) + .line_len(range.context.end.row); } ExpandExcerptDirection::UpAndDown => { - context.start.row = context.start.row.saturating_sub(line_count); - context.start.column = 0; - context.end.row = - (context.end.row + line_count).min(excerpt.buffer.max_point().row); - context.end.column = excerpt.buffer.line_len(context.end.row); + range.context.start.row = + range.context.start.row.saturating_sub(line_count); + range.context.start.column = 0; + range.context.end.row = (range.context.end.row + line_count) + .min(excerpt.buffer_snapshot(&snapshot).max_point().row); + range.context.end.column = excerpt + .buffer_snapshot(&snapshot) + .line_len(range.context.end.row); } } } - Some(ExcerptRange { - context, - primary: excerpt.range.primary.to_point(&excerpt.buffer), - }) - }); - let mut merged_ranges: Vec> = Vec::new(); - for range in expanded_ranges { - if let Some(last_range) = merged_ranges.last_mut() - && last_range.context.end >= range.context.start - { - last_range.context.end = range.context.end; - continue; - } - merged_ranges.push(range) + ranges.push(range); + cursor.next(); } - let Some(excerpt_id) = excerpt_id_ else { - continue; - }; - let Some(buffer_id) = 
&snapshot.buffer_id_for_excerpt(excerpt_id) else { - continue; - }; - let Some(buffer) = self.buffers.get(buffer_id).map(|b| b.buffer.clone()) else { - continue; - }; + ranges.sort_by(|l, r| l.context.start.cmp(&r.context.start)); - let buffer_snapshot = buffer.read(cx).snapshot(); - self.update_path_excerpts(path.clone(), buffer, &buffer_snapshot, merged_ranges, cx); + self.set_excerpt_ranges_for_path(path.clone(), buffer, buffer_snapshot, ranges, cx); } } /// Sets excerpts, returns `true` if at least one new excerpt was added. - fn set_merged_excerpt_ranges_for_path( + pub(crate) fn set_merged_excerpt_ranges_for_path( &mut self, path: PathKey, buffer: Entity, - ranges: Vec>, buffer_snapshot: &BufferSnapshot, - new: Vec>, - counts: Vec, + new: Vec>, cx: &mut Context, - ) -> (Vec>, bool) { - let insert_result = self.update_path_excerpts(path, buffer, buffer_snapshot, new, cx); - - let mut result = Vec::new(); - let mut ranges = ranges.into_iter(); - for (excerpt_id, range_count) in insert_result - .excerpt_ids + ) -> (bool, PathKeyIndex) + where + T: language::ToOffset, + { + let anchor_ranges = new .into_iter() - .zip(counts.into_iter()) - { - for range in ranges.by_ref().take(range_count) { - let range = Anchor::range_in_buffer( - excerpt_id, - buffer_snapshot.anchor_before(&range.primary.start) - ..buffer_snapshot.anchor_after(&range.primary.end), - ); - result.push(range) - } + .map(|r| ExcerptRange { + context: buffer_snapshot.anchor_before(r.context.start) + ..buffer_snapshot.anchor_after(r.context.end), + primary: buffer_snapshot.anchor_before(r.primary.start) + ..buffer_snapshot.anchor_after(r.primary.end), + }) + .collect::>(); + let inserted = + self.update_path_excerpts(path.clone(), buffer, buffer_snapshot, &anchor_ranges, cx); + let path_key_index = self.get_or_create_path_key_index(&path); + (inserted, path_key_index) + } + + pub(crate) fn get_or_create_path_key_index(&mut self, path_key: &PathKey) -> PathKeyIndex { + let mut snapshot = 
self.snapshot.borrow_mut(); + + if let Some(&existing) = snapshot.indices_by_path_key.get(path_key) { + return existing; } - (result, insert_result.added_new_excerpt) + + let index = snapshot + .path_keys_by_index + .last() + .map(|(index, _)| PathKeyIndex(index.0 + 1)) + .unwrap_or(PathKeyIndex(0)); + snapshot.path_keys_by_index.insert(index, path_key.clone()); + snapshot.indices_by_path_key.insert(path_key.clone(), index); + index } pub fn update_path_excerpts( &mut self, - path: PathKey, + path_key: PathKey, buffer: Entity, buffer_snapshot: &BufferSnapshot, - new: Vec>, + to_insert: &Vec>, cx: &mut Context, - ) -> PathExcerptInsertResult { - let mut insert_after = self - .excerpts_by_path - .range(..path.clone()) - .next_back() - .and_then(|(_, value)| value.last().copied()) - .unwrap_or(ExcerptId::min()); - - let existing = self - .excerpts_by_path - .get(&path) - .cloned() - .unwrap_or_default(); - let mut new_iter = new.into_iter().peekable(); - let mut existing_iter = existing.into_iter().peekable(); - - let mut excerpt_ids = Vec::new(); - let mut to_remove = Vec::new(); - let mut to_insert: Vec<(ExcerptId, ExcerptRange)> = Vec::new(); - let mut added_a_new_excerpt = false; - let snapshot = self.snapshot(cx); + ) -> bool { + let path_key_index = self.get_or_create_path_key_index(&path_key); + if let Some(old_path_key) = self + .snapshot(cx) + .path_for_buffer(buffer_snapshot.remote_id()) + && old_path_key != &path_key + { + self.remove_excerpts(old_path_key.clone(), cx); + } - let mut next_excerpt_id = - if let Some(last_entry) = self.snapshot.get_mut().excerpt_ids.last() { - last_entry.id.0 + 1 - } else { - 1 - }; + if to_insert.len() == 0 { + self.remove_excerpts(path_key.clone(), cx); - let mut next_excerpt_id = move || ExcerptId(post_inc(&mut next_excerpt_id)); + return false; + } + assert_eq!(self.history.transaction_depth(), 0); + self.sync_mut(cx); - let mut excerpts_cursor = snapshot.excerpts.cursor::>(()); - excerpts_cursor.next(); + let buffer_id = 
buffer_snapshot.remote_id(); - loop { - let existing = if let Some(&existing_id) = existing_iter.peek() { - let locator = snapshot.excerpt_locator_for_id(existing_id); - excerpts_cursor.seek_forward(&Some(locator), Bias::Left); - if let Some(excerpt) = excerpts_cursor.item() { - if excerpt.buffer_id != buffer_snapshot.remote_id() { - to_remove.push(existing_id); - existing_iter.next(); - continue; - } - Some((existing_id, excerpt.range.context.to_point(buffer_snapshot))) - } else { - None - } - } else { - None + let mut snapshot = self.snapshot.get_mut(); + let mut cursor = snapshot + .excerpts + .cursor::>(()); + let mut new_excerpts = SumTree::new(()); + + let new_ranges = to_insert.clone(); + let mut to_insert = to_insert.iter().peekable(); + let mut patch = Patch::empty(); + let mut added_new_excerpt = false; + + new_excerpts.append(cursor.slice(&path_key, Bias::Left), ()); + + // handle the case where the path key used to be associated + // with a different buffer by removing its excerpts. 
+ if let Some(excerpt) = cursor.item() + && &excerpt.path_key == &path_key + && excerpt.buffer_id != buffer_id + { + let old_buffer_id = excerpt.buffer_id; + self.buffers.remove(&old_buffer_id); + snapshot.buffers.remove(&old_buffer_id); + remove_diff_state(&mut snapshot.diffs, old_buffer_id); + self.diffs.remove(&old_buffer_id); + let before = cursor.position.1; + cursor.seek_forward(&path_key, Bias::Right); + let after = cursor.position.1; + patch.push(Edit { + old: before..after, + new: new_excerpts.summary().len()..new_excerpts.summary().len(), + }); + cx.emit(Event::BuffersRemoved { + removed_buffer_ids: vec![old_buffer_id], + }); + } + + while let Some(excerpt) = cursor.item() + && excerpt.path_key == path_key + { + assert_eq!(excerpt.buffer_id, buffer_id); + let Some(next_excerpt) = to_insert.peek() else { + break; }; + if &excerpt.range == *next_excerpt { + let before = new_excerpts.summary().len(); + new_excerpts.update_last( + |prev_excerpt| { + if !prev_excerpt.has_trailing_newline { + prev_excerpt.has_trailing_newline = true; + patch.push(Edit { + old: cursor.position.1..cursor.position.1, + new: before..before + MultiBufferOffset(1), + }); + } + }, + (), + ); + new_excerpts.push(excerpt.clone(), ()); + to_insert.next(); + cursor.next(); + continue; + } - let new = new_iter.peek(); - // Try to merge the next new range or existing excerpt into the last - // queued insert. - if let Some((last_id, last)) = to_insert.last_mut() { - // Next new range overlaps the last queued insert: absorb it by - // extending the insert's end. - if let Some(new) = new - && last.context.end >= new.context.start - { - last.context.end = last.context.end.max(new.context.end); - excerpt_ids.push(*last_id); - new_iter.next(); - continue; - } - // Next existing excerpt overlaps the last queued insert: absorb - // it by extending the insert's end, and record the existing - // excerpt as replaced so anchors in it resolve to the new one. 
- if let Some((existing_id, existing_range)) = &existing - && last.context.end >= existing_range.start - { - last.context.end = last.context.end.max(existing_range.end); - to_remove.push(*existing_id); - Arc::make_mut(&mut self.snapshot.get_mut().replaced_excerpts) - .insert(*existing_id, *last_id); - existing_iter.next(); - continue; - } + if excerpt + .range + .context + .start + .cmp(&next_excerpt.context.start, &buffer_snapshot) + .is_le() + { + // remove old excerpt + let before = cursor.position.1; + cursor.next(); + let after = cursor.position.1; + patch.push(Edit { + old: before..after, + new: new_excerpts.summary().len()..new_excerpts.summary().len(), + }); + } else { + // insert new excerpt + let next_excerpt = to_insert.next().unwrap(); + added_new_excerpt = true; + let before = new_excerpts.summary().len(); + new_excerpts.update_last( + |prev_excerpt| { + prev_excerpt.has_trailing_newline = true; + }, + (), + ); + new_excerpts.push( + Excerpt::new( + path_key.clone(), + path_key_index, + &buffer_snapshot, + next_excerpt.clone(), + false, + ), + (), + ); + let after = new_excerpts.summary().len(); + patch.push_maybe_empty(Edit { + old: cursor.position.1..cursor.position.1, + new: before..after, + }); } + } - match (new, existing) { - (None, None) => break, + // remove any further trailing excerpts + let mut before = cursor.position.1; + cursor.seek_forward(&path_key, Bias::Right); + let after = cursor.position.1; + // if we removed the previous last excerpt, remove the trailing newline from the new last excerpt + if cursor.item().is_none() && to_insert.peek().is_none() { + new_excerpts.update_last( + |excerpt| { + if excerpt.has_trailing_newline { + before.0.0 = before + .0 + .0 + .checked_sub(1) + .expect("should have preceding excerpt"); + excerpt.has_trailing_newline = false; + } + }, + (), + ); + } + patch.push(Edit { + old: before..after, + new: new_excerpts.summary().len()..new_excerpts.summary().len(), + }); - // No more new ranges; remove the 
remaining existing excerpt. - (None, Some((existing_id, _))) => { - existing_iter.next(); - to_remove.push(existing_id); - } + while let Some(next_excerpt) = to_insert.next() { + added_new_excerpt = true; + let before = new_excerpts.summary().len(); + new_excerpts.update_last( + |prev_excerpt| { + prev_excerpt.has_trailing_newline = true; + }, + (), + ); + new_excerpts.push( + Excerpt::new( + path_key.clone(), + path_key_index, + &buffer_snapshot, + next_excerpt.clone(), + false, + ), + (), + ); + let after = new_excerpts.summary().len(); + patch.push_maybe_empty(Edit { + old: cursor.position.1..cursor.position.1, + new: before..after, + }); + } - // No more existing excerpts; queue the new range for insertion. - (Some(_), None) => { - added_a_new_excerpt = true; - let new_id = next_excerpt_id(); - excerpt_ids.push(new_id); - to_insert.push((new_id, new_iter.next().unwrap())); - } + let suffix_start = cursor.position.1; + let suffix = cursor.suffix(); + let changed_trailing_excerpt = suffix.is_empty(); + if !suffix.is_empty() { + let before = new_excerpts.summary().len(); + new_excerpts.update_last( + |prev_excerpt| { + if !prev_excerpt.has_trailing_newline { + prev_excerpt.has_trailing_newline = true; + patch.push(Edit { + old: suffix_start..suffix_start, + new: before..before + MultiBufferOffset(1), + }); + } + }, + (), + ); + } + new_excerpts.append(suffix, ()); + drop(cursor); + + snapshot.excerpts = new_excerpts; + snapshot.buffers.insert( + buffer_id, + BufferStateSnapshot { + path_key: path_key.clone(), + path_key_index, + buffer_snapshot: buffer_snapshot.clone(), + }, + ); + + self.buffers.entry(buffer_id).or_insert_with(|| { + self.buffer_changed_since_sync.replace(true); + buffer.update(cx, |buffer, _| { + buffer.record_changes(Rc::downgrade(&self.buffer_changed_since_sync)); + }); + BufferState { + _subscriptions: [ + cx.observe(&buffer, |_, _, cx| cx.notify()), + cx.subscribe(&buffer, Self::on_buffer_event), + ], + buffer: buffer.clone(), + } + }); - // 
Existing excerpt ends before the new range starts, so it - // has no corresponding new range and must be removed. Flush - // pending inserts and advance `insert_after` past it so that - // future inserts receive locators *after* this excerpt's - // locator, preserving forward ordering. - (Some(new), Some((_, existing_range))) - if existing_range.end < new.context.start => - { - self.insert_excerpts_with_ids_after( - insert_after, - buffer.clone(), - mem::take(&mut to_insert), - cx, - ); - insert_after = existing_iter.next().unwrap(); - to_remove.push(insert_after); - } - // New range ends before the existing excerpt starts, so the - // new range has no corresponding existing excerpt. Queue it - // for insertion at the current `insert_after` position - // (before the existing excerpt), which is the correct - // spatial ordering. - (Some(new), Some((_, existing_range))) - if existing_range.start > new.context.end => - { - let new_id = next_excerpt_id(); - excerpt_ids.push(new_id); - to_insert.push((new_id, new_iter.next().unwrap())); - } - // Exact match: keep the existing excerpt in place, flush - // any pending inserts before it, and use it as the new - // `insert_after` anchor. - (Some(new), Some((_, existing_range))) - if existing_range.start == new.context.start - && existing_range.end == new.context.end => - { - self.insert_excerpts_with_ids_after( - insert_after, - buffer.clone(), - mem::take(&mut to_insert), - cx, - ); - insert_after = existing_iter.next().unwrap(); - excerpt_ids.push(insert_after); - new_iter.next(); - } + if changed_trailing_excerpt { + snapshot.trailing_excerpt_update_count += 1; + } - // Partial overlap: replace the existing excerpt with a new - // one whose range is the union of both, and record the - // replacement so that anchors in the old excerpt resolve to - // the new one. 
- (Some(_), Some((_, existing_range))) => { - let existing_id = existing_iter.next().unwrap(); - let new_id = next_excerpt_id(); - Arc::make_mut(&mut self.snapshot.get_mut().replaced_excerpts) - .insert(existing_id, new_id); - to_remove.push(existing_id); - let mut range = new_iter.next().unwrap(); - range.context.start = range.context.start.min(existing_range.start); - range.context.end = range.context.end.max(existing_range.end); - excerpt_ids.push(new_id); - to_insert.push((new_id, range)); - } - }; + let edits = Self::sync_diff_transforms( + &mut snapshot, + patch.into_inner(), + DiffChangeKind::BufferEdited, + ); + if !edits.is_empty() { + self.subscriptions.publish(edits); } - self.insert_excerpts_with_ids_after(insert_after, buffer, to_insert, cx); - // todo(lw): There is a logic bug somewhere that causes the to_remove vector to be not ordered correctly - to_remove.sort_by_cached_key(|&id| snapshot.excerpt_locator_for_id(id)); - self.remove_excerpts(to_remove, cx); + cx.emit(Event::Edited { + edited_buffer: None, + is_local: true, + }); + cx.emit(Event::BufferRangesUpdated { + buffer, + path_key: path_key.clone(), + ranges: new_ranges, + }); + cx.notify(); - if excerpt_ids.is_empty() { - self.excerpts_by_path.remove(&path); - } else { - let snapshot = &*self.snapshot.get_mut(); - let excerpt_ids = excerpt_ids - .iter() - .dedup() - .cloned() - // todo(lw): There is a logic bug somewhere that causes excerpt_ids to not necessarily be in order by locator - .sorted_by_cached_key(|&id| snapshot.excerpt_locator_for_id(id)) - .collect(); - for &excerpt_id in &excerpt_ids { - self.paths_by_excerpt.insert(excerpt_id, path.clone()); - } - self.excerpts_by_path.insert(path, excerpt_ids); + added_new_excerpt + } + + pub fn remove_excerpts_for_buffer(&mut self, buffer: BufferId, cx: &mut Context) { + let snapshot = self.sync_mut(cx); + let Some(path) = snapshot.path_for_buffer(buffer).cloned() else { + return; + }; + self.remove_excerpts(path, cx); + } + + pub fn 
remove_excerpts(&mut self, path: PathKey, cx: &mut Context) { + assert_eq!(self.history.transaction_depth(), 0); + self.sync_mut(cx); + + let mut snapshot = self.snapshot.get_mut(); + let mut cursor = snapshot + .excerpts + .cursor::>(()); + let mut new_excerpts = SumTree::new(()); + new_excerpts.append(cursor.slice(&path, Bias::Left), ()); + let mut edit_start = cursor.position.1; + let mut buffer_id = None; + if let Some(excerpt) = cursor.item() + && excerpt.path_key == path + { + buffer_id = Some(excerpt.buffer_id); } + cursor.seek(&path, Bias::Right); + let edit_end = cursor.position.1; + let suffix = cursor.suffix(); + let changed_trailing_excerpt = suffix.is_empty(); + new_excerpts.append(suffix, ()); + + if let Some(buffer_id) = buffer_id { + snapshot.buffers.remove(&buffer_id); + remove_diff_state(&mut snapshot.diffs, buffer_id); + self.buffers.remove(&buffer_id); + self.diffs.remove(&buffer_id); + cx.emit(Event::BuffersRemoved { + removed_buffer_ids: vec![buffer_id], + }) + } + drop(cursor); + if changed_trailing_excerpt { + snapshot.trailing_excerpt_update_count += 1; + new_excerpts.update_last( + |excerpt| { + if excerpt.has_trailing_newline { + excerpt.has_trailing_newline = false; + edit_start.0.0 = edit_start + .0 + .0 + .checked_sub(1) + .expect("should have at least one excerpt"); + } + }, + (), + ) + } + + let edit = Edit { + old: edit_start..edit_end, + new: edit_start..edit_start, + }; + snapshot.excerpts = new_excerpts; - PathExcerptInsertResult { - excerpt_ids, - added_new_excerpt: added_a_new_excerpt, + let edits = + Self::sync_diff_transforms(&mut snapshot, vec![edit], DiffChangeKind::BufferEdited); + if !edits.is_empty() { + self.subscriptions.publish(edits); } + + cx.emit(Event::Edited { + edited_buffer: None, + is_local: true, + }); + cx.notify(); } } diff --git a/crates/multi_buffer/src/transaction.rs b/crates/multi_buffer/src/transaction.rs index a65e394c8f1834a95ccbc70532aa03d2a3e6e34c..a3afe55cd6928b9e908d0249af5fb8fe7fc4bbe4 100644 
--- a/crates/multi_buffer/src/transaction.rs +++ b/crates/multi_buffer/src/transaction.rs @@ -2,15 +2,15 @@ use gpui::{App, Context, Entity}; use language::{self, Buffer, TransactionId}; use std::{ collections::HashMap, - ops::{AddAssign, Range, Sub}, + ops::Range, time::{Duration, Instant}, }; use sum_tree::Bias; use text::BufferId; -use crate::{BufferState, MultiBufferDimension}; +use crate::{Anchor, BufferState, MultiBufferOffset}; -use super::{Event, ExcerptSummary, MultiBuffer}; +use super::{Event, MultiBuffer}; #[derive(Clone)] pub(super) struct History { @@ -314,71 +314,50 @@ impl MultiBuffer { } } - pub fn edited_ranges_for_transaction( + pub fn edited_ranges_for_transaction( &self, transaction_id: TransactionId, cx: &App, - ) -> Vec> - where - D: MultiBufferDimension - + Ord - + Sub - + AddAssign, - D::TextDimension: PartialOrd + Sub, - { + ) -> Vec> { let Some(transaction) = self.history.transaction(transaction_id) else { return Vec::new(); }; - let mut ranges = Vec::new(); let snapshot = self.read(cx); - let mut cursor = snapshot.excerpts.cursor::(()); + let mut buffer_anchors = Vec::new(); for (buffer_id, buffer_transaction) in &transaction.buffer_transactions { - let Some(buffer_state) = self.buffers.get(buffer_id) else { + let Some(buffer) = self.buffer(*buffer_id) else { continue; }; + let Some(excerpt) = snapshot.first_excerpt_for_buffer(*buffer_id) else { + continue; + }; + let buffer_snapshot = buffer.read(cx).snapshot(); - let buffer = buffer_state.buffer.read(cx); - for range in - buffer.edited_ranges_for_transaction_id::(*buffer_transaction) + for range in buffer + .read(cx) + .edited_ranges_for_transaction_id::(*buffer_transaction) { - for excerpt_id in &buffer_state.excerpts { - cursor.seek(excerpt_id, Bias::Left); - if let Some(excerpt) = cursor.item() - && excerpt.locator == *excerpt_id - { - let excerpt_buffer_start = excerpt - .range - .context - .start - .summary::(buffer); - let excerpt_buffer_end = excerpt - .range - .context - .end - 
.summary::(buffer); - let excerpt_range = excerpt_buffer_start..excerpt_buffer_end; - if excerpt_range.contains(&range.start) - && excerpt_range.contains(&range.end) - { - let excerpt_start = D::from_summary(&cursor.start().text); - - let mut start = excerpt_start; - start += range.start - excerpt_buffer_start; - let mut end = excerpt_start; - end += range.end - excerpt_buffer_start; - - ranges.push(start..end); - break; - } - } - } + buffer_anchors.push(Anchor::in_buffer( + excerpt.path_key_index, + buffer_snapshot.anchor_at(range.start, Bias::Left), + )); + buffer_anchors.push(Anchor::in_buffer( + excerpt.path_key_index, + buffer_snapshot.anchor_at(range.end, Bias::Right), + )); } } + buffer_anchors.sort_unstable_by(|a, b| a.cmp(b, &snapshot)); - ranges.sort_by_key(|range| range.start); - ranges + snapshot + .summaries_for_anchors(buffer_anchors.iter()) + .as_chunks::<2>() + .0 + .iter() + .map(|&[s, e]| s..e) + .collect::>() } pub fn merge_transactions( diff --git a/crates/outline/src/outline.rs b/crates/outline/src/outline.rs index a03c87d9f68e41dd29d9d614f714db47083831ef..af5671632fdac175e5d31ae15c5890d439b7860f 100644 --- a/crates/outline/src/outline.rs +++ b/crates/outline/src/outline.rs @@ -79,29 +79,37 @@ fn outline_for_editor( cx: &mut App, ) -> Option>>> { let multibuffer = editor.read(cx).buffer().read(cx).snapshot(cx); - let (excerpt_id, _, buffer_snapshot) = multibuffer.as_singleton()?; + let buffer_snapshot = multibuffer.as_singleton()?; let buffer_id = buffer_snapshot.remote_id(); let task = editor.update(cx, |editor, cx| editor.buffer_outline_items(buffer_id, cx)); Some(cx.background_executor().spawn(async move { task.await .into_iter() - .map(|item| OutlineItem { - depth: item.depth, - range: Anchor::range_in_buffer(excerpt_id, item.range), - source_range_for_text: Anchor::range_in_buffer( - excerpt_id, - item.source_range_for_text, - ), - text: item.text, - highlight_ranges: item.highlight_ranges, - name_ranges: item.name_ranges, - body_range: 
item - .body_range - .map(|r| Anchor::range_in_buffer(excerpt_id, r)), - annotation_range: item - .annotation_range - .map(|r| Anchor::range_in_buffer(excerpt_id, r)), + .filter_map(|item| { + Some(OutlineItem { + depth: item.depth, + range: multibuffer.anchor_in_buffer(item.range.start)? + ..multibuffer.anchor_in_buffer(item.range.end)?, + source_range_for_text: multibuffer + .anchor_in_buffer(item.source_range_for_text.start)? + ..multibuffer.anchor_in_buffer(item.source_range_for_text.end)?, + text: item.text, + highlight_ranges: item.highlight_ranges, + name_ranges: item.name_ranges, + body_range: item.body_range.and_then(|r| { + Some( + multibuffer.anchor_in_buffer(r.start)? + ..multibuffer.anchor_in_buffer(r.end)?, + ) + }), + annotation_range: item.annotation_range.and_then(|r| { + Some( + multibuffer.anchor_in_buffer(r.start)? + ..multibuffer.anchor_in_buffer(r.end)?, + ) + }), + }) }) .collect() })) diff --git a/crates/outline_panel/src/outline_panel.rs b/crates/outline_panel/src/outline_panel.rs index aa6f89cb8c11c40d4121ab12720069ee7fe66844..b7d5afcb687c017fdf253717a9dae2c95c55b53b 100644 --- a/crates/outline_panel/src/outline_panel.rs +++ b/crates/outline_panel/src/outline_panel.rs @@ -1,11 +1,11 @@ mod outline_panel_settings; use anyhow::Context as _; -use collections::{BTreeSet, HashMap, HashSet, hash_map}; +use collections::{BTreeSet, HashMap, HashSet}; use db::kvp::KeyValueStore; use editor::{ - AnchorRangeExt, Bias, DisplayPoint, Editor, EditorEvent, ExcerptId, ExcerptRange, - MultiBufferSnapshot, RangeToAnchorExt, SelectionEffects, + AnchorRangeExt, Bias, DisplayPoint, Editor, EditorEvent, ExcerptRange, MultiBufferSnapshot, + RangeToAnchorExt, SelectionEffects, display_map::ToDisplayPoint, items::{entry_git_aware_label_color, entry_label_color}, scroll::{Autoscroll, ScrollAnchor}, @@ -129,12 +129,12 @@ pub struct OutlinePanel { selected_entry: SelectedEntry, active_item: Option, _subscriptions: Vec, - new_entries_for_fs_update: HashSet, + 
new_entries_for_fs_update: HashSet, fs_entries_update_task: Task<()>, cached_entries_update_task: Task<()>, reveal_selection_task: Task>, outline_fetch_tasks: HashMap>, - excerpts: HashMap>, + buffers: HashMap, cached_entries: Vec, filter_editor: Entity, mode: ItemsDisplayMode, @@ -334,42 +334,41 @@ enum CollapsedEntry { Dir(WorktreeId, ProjectEntryId), File(WorktreeId, BufferId), ExternalFile(BufferId), - Excerpt(BufferId, ExcerptId), - Outline(BufferId, ExcerptId, Range), + Excerpt(ExcerptRange), + Outline(Range), } -#[derive(Debug)] -struct Excerpt { - range: ExcerptRange, - outlines: ExcerptOutlines, +struct BufferOutlines { + excerpts: Vec>, + outlines: OutlineState, } -impl Excerpt { +impl BufferOutlines { fn invalidate_outlines(&mut self) { - if let ExcerptOutlines::Outlines(valid_outlines) = &mut self.outlines { - self.outlines = ExcerptOutlines::Invalidated(std::mem::take(valid_outlines)); + if let OutlineState::Outlines(valid_outlines) = &mut self.outlines { + self.outlines = OutlineState::Invalidated(std::mem::take(valid_outlines)); } } fn iter_outlines(&self) -> impl Iterator { match &self.outlines { - ExcerptOutlines::Outlines(outlines) => outlines.iter(), - ExcerptOutlines::Invalidated(outlines) => outlines.iter(), - ExcerptOutlines::NotFetched => [].iter(), + OutlineState::Outlines(outlines) => outlines.iter(), + OutlineState::Invalidated(outlines) => outlines.iter(), + OutlineState::NotFetched => [].iter(), } } fn should_fetch_outlines(&self) -> bool { match &self.outlines { - ExcerptOutlines::Outlines(_) => false, - ExcerptOutlines::Invalidated(_) => true, - ExcerptOutlines::NotFetched => true, + OutlineState::Outlines(_) => false, + OutlineState::Invalidated(_) => true, + OutlineState::NotFetched => true, } } } #[derive(Debug)] -enum ExcerptOutlines { +enum OutlineState { Outlines(Vec), Invalidated(Vec), NotFetched, @@ -536,54 +535,24 @@ impl SearchData { } } -#[derive(Clone, Debug, PartialEq, Eq, Hash)] -struct OutlineEntryExcerpt { - id: 
ExcerptId, - buffer_id: BufferId, - range: ExcerptRange, -} - -#[derive(Clone, Debug, Eq)] -struct OutlineEntryOutline { - buffer_id: BufferId, - excerpt_id: ExcerptId, - outline: Outline, -} - -impl PartialEq for OutlineEntryOutline { - fn eq(&self, other: &Self) -> bool { - self.buffer_id == other.buffer_id - && self.excerpt_id == other.excerpt_id - && self.outline.depth == other.outline.depth - && self.outline.range == other.outline.range - && self.outline.text == other.outline.text - } -} - -impl Hash for OutlineEntryOutline { - fn hash(&self, state: &mut H) { - ( - self.buffer_id, - self.excerpt_id, - self.outline.depth, - &self.outline.range, - &self.outline.text, - ) - .hash(state); - } -} - #[derive(Clone, Debug, PartialEq, Eq)] enum OutlineEntry { - Excerpt(OutlineEntryExcerpt), - Outline(OutlineEntryOutline), + Excerpt(ExcerptRange), + Outline(Outline), } impl OutlineEntry { - fn ids(&self) -> (BufferId, ExcerptId) { + fn buffer_id(&self) -> BufferId { match self { - OutlineEntry::Excerpt(excerpt) => (excerpt.buffer_id, excerpt.id), - OutlineEntry::Outline(outline) => (outline.buffer_id, outline.excerpt_id), + OutlineEntry::Excerpt(excerpt) => excerpt.context.start.buffer_id, + OutlineEntry::Outline(outline) => outline.range.start.buffer_id, + } + } + + fn range(&self) -> Range { + match self { + OutlineEntry::Excerpt(excerpt) => excerpt.context.clone(), + OutlineEntry::Outline(outline) => outline.range.clone(), } } } @@ -593,7 +562,7 @@ struct FsEntryFile { worktree_id: WorktreeId, entry: GitEntry, buffer_id: BufferId, - excerpts: Vec, + excerpts: Vec>, } impl PartialEq for FsEntryFile { @@ -631,7 +600,7 @@ impl Hash for FsEntryDirectory { #[derive(Debug, Clone, Eq)] struct FsEntryExternalFile { buffer_id: BufferId, - excerpts: Vec, + excerpts: Vec>, } impl PartialEq for FsEntryExternalFile { @@ -787,10 +756,8 @@ impl OutlinePanel { if ¤t_theme != new_theme { outline_panel_settings = *new_settings; current_theme = new_theme.clone(); - for excerpts in 
outline_panel.excerpts.values_mut() { - for excerpt in excerpts.values_mut() { - excerpt.invalidate_outlines(); - } + for buffer in outline_panel.buffers.values_mut() { + buffer.invalidate_outlines(); } outlines_invalidated = true; let update_cached_items = outline_panel.update_non_fs_items(window, cx); @@ -809,30 +776,23 @@ impl OutlinePanel { let new_depth = new_settings.expand_outlines_with_depth; - for (buffer_id, excerpts) in &outline_panel.excerpts { - for (excerpt_id, excerpt) in excerpts { - if let ExcerptOutlines::Outlines(outlines) = &excerpt.outlines { - for outline in outlines { - if outline_panel - .outline_children_cache - .get(buffer_id) - .and_then(|children_map| { - let key = - (outline.range.clone(), outline.depth); - children_map.get(&key) - }) - .copied() - .unwrap_or(false) - && (new_depth == 0 || outline.depth >= new_depth) - { - outline_panel.collapsed_entries.insert( - CollapsedEntry::Outline( - *buffer_id, - *excerpt_id, - outline.range.clone(), - ), - ); - } + for (buffer_id, buffer) in &outline_panel.buffers { + if let OutlineState::Outlines(outlines) = &buffer.outlines { + for outline in outlines { + if outline_panel + .outline_children_cache + .get(buffer_id) + .and_then(|children_map| { + let key = (outline.range.clone(), outline.depth); + children_map.get(&key) + }) + .copied() + .unwrap_or(false) + && (new_depth == 0 || outline.depth >= new_depth) + { + outline_panel.collapsed_entries.insert( + CollapsedEntry::Outline(outline.range.clone()), + ); } } } @@ -852,7 +812,7 @@ impl OutlinePanel { if !outlines_invalidated { let new_document_symbols = outline_panel - .excerpts + .buffers .keys() .filter_map(|buffer_id| { let buffer = outline_panel @@ -867,10 +827,8 @@ impl OutlinePanel { .collect(); if new_document_symbols != document_symbols_by_buffer { document_symbols_by_buffer = new_document_symbols; - for excerpts in outline_panel.excerpts.values_mut() { - for excerpt in excerpts.values_mut() { - excerpt.invalidate_outlines(); - } + 
for buffer in outline_panel.buffers.values_mut() { + buffer.invalidate_outlines(); } let update_cached_items = outline_panel.update_non_fs_items(window, cx); if update_cached_items { @@ -914,7 +872,7 @@ impl OutlinePanel { cached_entries_update_task: Task::ready(()), reveal_selection_task: Task::ready(Ok(())), outline_fetch_tasks: HashMap::default(), - excerpts: HashMap::default(), + buffers: HashMap::default(), cached_entries: Vec::new(), _subscriptions: vec![ settings_subscription, @@ -1110,16 +1068,13 @@ impl OutlinePanel { PanelEntry::Fs(FsEntry::ExternalFile(file)) => { change_selection = false; scroll_to_buffer = Some(file.buffer_id); - multi_buffer_snapshot.excerpts().find_map( - |(excerpt_id, buffer_snapshot, excerpt_range)| { - if buffer_snapshot.remote_id() == file.buffer_id { - multi_buffer_snapshot - .anchor_in_excerpt(excerpt_id, excerpt_range.context.start) - } else { - None - } - }, - ) + multi_buffer_snapshot.excerpts().find_map(|excerpt_range| { + if excerpt_range.context.start.buffer_id == file.buffer_id { + multi_buffer_snapshot.anchor_in_excerpt(excerpt_range.context.start) + } else { + None + } + }) } PanelEntry::Fs(FsEntry::File(file)) => { @@ -1132,26 +1087,20 @@ impl OutlinePanel { .and_then(|path| project.get_open_buffer(&path, cx)) }) .map(|buffer| { - active_multi_buffer - .read(cx) - .excerpts_for_buffer(buffer.read(cx).remote_id(), cx) + multi_buffer_snapshot.excerpts_for_buffer(buffer.read(cx).remote_id()) }) - .and_then(|excerpts| { - let (excerpt_id, _, excerpt_range) = excerpts.first()?; - multi_buffer_snapshot - .anchor_in_excerpt(*excerpt_id, excerpt_range.context.start) + .and_then(|mut excerpts| { + let excerpt_range = excerpts.next()?; + multi_buffer_snapshot.anchor_in_excerpt(excerpt_range.context.start) }) } PanelEntry::Outline(OutlineEntry::Outline(outline)) => multi_buffer_snapshot - .anchor_in_excerpt(outline.excerpt_id, outline.outline.range.start) - .or_else(|| { - multi_buffer_snapshot - 
.anchor_in_excerpt(outline.excerpt_id, outline.outline.range.end) - }), + .anchor_in_excerpt(outline.range.start) + .or_else(|| multi_buffer_snapshot.anchor_in_excerpt(outline.range.end)), PanelEntry::Outline(OutlineEntry::Excerpt(excerpt)) => { change_selection = false; change_focus = false; - multi_buffer_snapshot.anchor_in_excerpt(excerpt.id, excerpt.range.context.start) + multi_buffer_snapshot.anchor_in_excerpt(excerpt.context.start) } PanelEntry::Search(search_entry) => Some(search_entry.match_range.start), }; @@ -1359,12 +1308,12 @@ impl OutlinePanel { PanelEntry::Outline(OutlineEntry::Excerpt(excerpt)) => { previous_entries.find(|entry| match entry { PanelEntry::Fs(FsEntry::File(file)) => { - file.buffer_id == excerpt.buffer_id - && file.excerpts.contains(&excerpt.id) + file.buffer_id == excerpt.context.start.buffer_id + && file.excerpts.contains(&excerpt) } PanelEntry::Fs(FsEntry::ExternalFile(external_file)) => { - external_file.buffer_id == excerpt.buffer_id - && external_file.excerpts.contains(&excerpt.id) + external_file.buffer_id == excerpt.context.start.buffer_id + && external_file.excerpts.contains(&excerpt) } _ => false, }) @@ -1372,8 +1321,16 @@ impl OutlinePanel { PanelEntry::Outline(OutlineEntry::Outline(outline)) => { previous_entries.find(|entry| { if let PanelEntry::Outline(OutlineEntry::Excerpt(excerpt)) = entry { - outline.buffer_id == excerpt.buffer_id - && outline.excerpt_id == excerpt.id + if outline.range.start.buffer_id != excerpt.context.start.buffer_id { + return false; + } + let Some(buffer_snapshot) = + self.buffer_snapshot_for_id(outline.range.start.buffer_id, cx) + else { + return false; + }; + excerpt.contains(&outline.range.start, &buffer_snapshot) + || excerpt.contains(&outline.range.end, &buffer_snapshot) } else { false } @@ -1584,13 +1541,11 @@ impl OutlinePanel { Some(CollapsedEntry::ExternalFile(external_file.buffer_id)) } PanelEntry::Outline(OutlineEntry::Excerpt(excerpt)) => { - 
Some(CollapsedEntry::Excerpt(excerpt.buffer_id, excerpt.id)) + Some(CollapsedEntry::Excerpt(excerpt.clone())) + } + PanelEntry::Outline(OutlineEntry::Outline(outline)) => { + Some(CollapsedEntry::Outline(outline.range.clone())) } - PanelEntry::Outline(OutlineEntry::Outline(outline)) => Some(CollapsedEntry::Outline( - outline.buffer_id, - outline.excerpt_id, - outline.outline.range.clone(), - )), PanelEntry::Search(_) => return, }; let Some(collapsed_entry) = entry_to_expand else { @@ -1691,14 +1646,10 @@ impl OutlinePanel { } PanelEntry::Outline(OutlineEntry::Excerpt(excerpt)) => self .collapsed_entries - .insert(CollapsedEntry::Excerpt(excerpt.buffer_id, excerpt.id)), - PanelEntry::Outline(OutlineEntry::Outline(outline)) => { - self.collapsed_entries.insert(CollapsedEntry::Outline( - outline.buffer_id, - outline.excerpt_id, - outline.outline.range.clone(), - )) - } + .insert(CollapsedEntry::Excerpt(excerpt.clone())), + PanelEntry::Outline(OutlineEntry::Outline(outline)) => self + .collapsed_entries + .insert(CollapsedEntry::Outline(outline.range.clone())), PanelEntry::Search(_) => false, }; @@ -1753,31 +1704,26 @@ impl OutlinePanel { } } - for (&buffer_id, excerpts) in &self.excerpts { - for (&excerpt_id, excerpt) in excerpts { - match &excerpt.outlines { - ExcerptOutlines::Outlines(outlines) => { - for outline in outlines { - to_uncollapse.insert(CollapsedEntry::Outline( - buffer_id, - excerpt_id, - outline.range.clone(), - )); - } + for (_buffer_id, buffer) in &self.buffers { + match &buffer.outlines { + OutlineState::Outlines(outlines) => { + for outline in outlines { + to_uncollapse.insert(CollapsedEntry::Outline(outline.range.clone())); } - ExcerptOutlines::Invalidated(outlines) => { - for outline in outlines { - to_uncollapse.insert(CollapsedEntry::Outline( - buffer_id, - excerpt_id, - outline.range.clone(), - )); - } + } + OutlineState::Invalidated(outlines) => { + for outline in outlines { + 
to_uncollapse.insert(CollapsedEntry::Outline(outline.range.clone())); } - ExcerptOutlines::NotFetched => {} } - to_uncollapse.insert(CollapsedEntry::Excerpt(buffer_id, excerpt_id)); + OutlineState::NotFetched => {} } + to_uncollapse.extend( + buffer + .excerpts + .iter() + .map(|excerpt| CollapsedEntry::Excerpt(excerpt.clone())), + ); } for cached in &self.cached_entries { @@ -1844,14 +1790,10 @@ impl OutlinePanel { .. }) => Some(CollapsedEntry::Dir(*worktree_id, entries.last()?.id)), PanelEntry::Outline(OutlineEntry::Excerpt(excerpt)) => { - Some(CollapsedEntry::Excerpt(excerpt.buffer_id, excerpt.id)) + Some(CollapsedEntry::Excerpt(excerpt.clone())) } PanelEntry::Outline(OutlineEntry::Outline(outline)) => { - Some(CollapsedEntry::Outline( - outline.buffer_id, - outline.excerpt_id, - outline.outline.range.clone(), - )) + Some(CollapsedEntry::Outline(outline.range.clone())) } PanelEntry::Search(_) => None, }, @@ -1939,17 +1881,13 @@ impl OutlinePanel { } } PanelEntry::Outline(OutlineEntry::Excerpt(excerpt)) => { - let collapsed_entry = CollapsedEntry::Excerpt(excerpt.buffer_id, excerpt.id); + let collapsed_entry = CollapsedEntry::Excerpt(excerpt.clone()); if !self.collapsed_entries.remove(&collapsed_entry) { self.collapsed_entries.insert(collapsed_entry); } } PanelEntry::Outline(OutlineEntry::Outline(outline)) => { - let collapsed_entry = CollapsedEntry::Outline( - outline.buffer_id, - outline.excerpt_id, - outline.outline.range.clone(), - ); + let collapsed_entry = CollapsedEntry::Outline(outline.range.clone()); if !self.collapsed_entries.remove(&collapsed_entry) { self.collapsed_entries.insert(collapsed_entry); } @@ -2103,6 +2041,8 @@ impl OutlinePanel { let project = self.project.clone(); self.reveal_selection_task = cx.spawn_in(window, async move |outline_panel, cx| { cx.background_executor().timer(UPDATE_DEBOUNCE).await; + let multibuffer_snapshot = + editor.read_with(cx, |editor, cx| editor.buffer().read(cx).snapshot(cx)); let entry_with_selection = 
outline_panel.update_in(cx, |outline_panel, window, cx| { outline_panel.location_for_editor_selection(&editor, window, cx) @@ -2132,14 +2072,28 @@ impl OutlinePanel { }) }), PanelEntry::Outline(outline_entry) => { - let (buffer_id, excerpt_id) = outline_entry.ids(); + let buffer_id = outline_entry.buffer_id(); + let outline_range = outline_entry.range(); outline_panel.update(cx, |outline_panel, cx| { outline_panel .collapsed_entries .remove(&CollapsedEntry::ExternalFile(buffer_id)); - outline_panel - .collapsed_entries - .remove(&CollapsedEntry::Excerpt(buffer_id, excerpt_id)); + if let Some(buffer_snapshot) = + outline_panel.buffer_snapshot_for_id(buffer_id, cx) + { + outline_panel.collapsed_entries.retain(|entry| match entry { + CollapsedEntry::Excerpt(excerpt_range) => { + let intersects = excerpt_range.context.start.buffer_id + == buffer_id + && (excerpt_range + .contains(&outline_range.start, &buffer_snapshot) + || excerpt_range + .contains(&outline_range.end, &buffer_snapshot)); + !intersects + } + _ => true, + }); + } let project = outline_panel.project.read(cx); let entry_id = project .buffer_for_id(buffer_id, cx) @@ -2160,11 +2114,9 @@ impl OutlinePanel { })? } PanelEntry::Fs(FsEntry::ExternalFile(..)) => None, - PanelEntry::Search(SearchEntry { match_range, .. }) => match_range - .start - .text_anchor - .buffer_id - .or(match_range.end.text_anchor.buffer_id) + PanelEntry::Search(SearchEntry { match_range, .. 
}) => multibuffer_snapshot + .anchor_to_buffer_anchor(match_range.start) + .map(|(anchor, _)| anchor.buffer_id) .map(|buffer_id| { outline_panel.update(cx, |outline_panel, cx| { outline_panel @@ -2246,30 +2198,30 @@ impl OutlinePanel { fn render_excerpt( &self, - excerpt: &OutlineEntryExcerpt, + excerpt: &ExcerptRange, depth: usize, window: &mut Window, cx: &mut Context, ) -> Option> { - let item_id = ElementId::from(excerpt.id.to_proto() as usize); + let item_id = ElementId::from(format!("{excerpt:?}")); let is_active = match self.selected_entry() { Some(PanelEntry::Outline(OutlineEntry::Excerpt(selected_excerpt))) => { - selected_excerpt.buffer_id == excerpt.buffer_id && selected_excerpt.id == excerpt.id + selected_excerpt == excerpt } _ => false, }; let has_outlines = self - .excerpts - .get(&excerpt.buffer_id) - .and_then(|excerpts| match &excerpts.get(&excerpt.id)?.outlines { - ExcerptOutlines::Outlines(outlines) => Some(outlines), - ExcerptOutlines::Invalidated(outlines) => Some(outlines), - ExcerptOutlines::NotFetched => None, + .buffers + .get(&excerpt.context.start.buffer_id) + .and_then(|buffer| match &buffer.outlines { + OutlineState::Outlines(outlines) => Some(outlines), + OutlineState::Invalidated(outlines) => Some(outlines), + OutlineState::NotFetched => None, }) .is_some_and(|outlines| !outlines.is_empty()); let is_expanded = !self .collapsed_entries - .contains(&CollapsedEntry::Excerpt(excerpt.buffer_id, excerpt.id)); + .contains(&CollapsedEntry::Excerpt(excerpt.clone())); let color = entry_label_color(is_active); let icon = if has_outlines { FileIcons::get_chevron_icon(is_expanded, cx) @@ -2279,7 +2231,7 @@ impl OutlinePanel { } .unwrap_or_else(empty_icon); - let label = self.excerpt_label(excerpt.buffer_id, &excerpt.range, cx)?; + let label = self.excerpt_label(&excerpt, cx)?; let label_element = Label::new(label) .single_line() .color(color) @@ -2297,13 +2249,8 @@ impl OutlinePanel { )) } - fn excerpt_label( - &self, - buffer_id: BufferId, - 
range: &ExcerptRange, - cx: &App, - ) -> Option { - let buffer_snapshot = self.buffer_snapshot_for_id(buffer_id, cx)?; + fn excerpt_label(&self, range: &ExcerptRange, cx: &App) -> Option { + let buffer_snapshot = self.buffer_snapshot_for_id(range.context.start.buffer_id, cx)?; let excerpt_range = range.context.to_point(&buffer_snapshot); Some(format!( "Lines {}- {}", @@ -2314,19 +2261,19 @@ impl OutlinePanel { fn render_outline( &self, - outline: &OutlineEntryOutline, + outline: &Outline, depth: usize, string_match: Option<&StringMatch>, window: &mut Window, cx: &mut Context, ) -> Stateful
{ let item_id = ElementId::from(SharedString::from(format!( - "{:?}|{:?}{:?}|{:?}", - outline.buffer_id, outline.excerpt_id, outline.outline.range, &outline.outline.text, + "{:?}|{:?}", + outline.range, &outline.text, ))); let label_element = outline::render_item( - &outline.outline, + &outline, string_match .map(|string_match| string_match.ranges().collect::>()) .unwrap_or_default(), @@ -2335,26 +2282,22 @@ impl OutlinePanel { .into_any_element(); let is_active = match self.selected_entry() { - Some(PanelEntry::Outline(OutlineEntry::Outline(selected))) => { - outline == selected && outline.outline == selected.outline - } + Some(PanelEntry::Outline(OutlineEntry::Outline(selected))) => outline == selected, _ => false, }; let has_children = self .outline_children_cache - .get(&outline.buffer_id) + .get(&outline.range.start.buffer_id) .and_then(|children_map| { - let key = (outline.outline.range.clone(), outline.outline.depth); + let key = (outline.range.clone(), outline.depth); children_map.get(&key) }) .copied() .unwrap_or(false); - let is_expanded = !self.collapsed_entries.contains(&CollapsedEntry::Outline( - outline.buffer_id, - outline.excerpt_id, - outline.outline.range.clone(), - )); + let is_expanded = !self + .collapsed_entries + .contains(&CollapsedEntry::Outline(outline.range.clone())); let icon = if has_children { FileIcons::get_chevron_icon(is_expanded, cx) @@ -2784,7 +2727,7 @@ impl OutlinePanel { let mut new_collapsed_entries = HashSet::default(); let mut new_unfolded_dirs = HashMap::default(); let mut root_entries = HashSet::default(); - let mut new_excerpts = HashMap::>::default(); + let mut new_buffers = HashMap::::default(); let Ok(buffer_excerpts) = outline_panel.update(cx, |outline_panel, cx| { let git_store = outline_panel.project.read(cx).git_store().clone(); new_collapsed_entries = outline_panel.collapsed_entries.clone(); @@ -2793,13 +2736,18 @@ impl OutlinePanel { multi_buffer_snapshot.excerpts().fold( HashMap::default(), - |mut 
buffer_excerpts, (excerpt_id, buffer_snapshot, excerpt_range)| { + |mut buffer_excerpts, excerpt_range| { + let Some(buffer_snapshot) = multi_buffer_snapshot + .buffer_for_id(excerpt_range.context.start.buffer_id) + else { + return buffer_excerpts; + }; let buffer_id = buffer_snapshot.remote_id(); let file = File::from_dyn(buffer_snapshot.file()); let entry_id = file.and_then(|file| file.project_entry_id()); let worktree = file.map(|file| file.worktree.read(cx).snapshot()); - let is_new = new_entries.contains(&excerpt_id) - || !outline_panel.excerpts.contains_key(&buffer_id); + let is_new = new_entries.contains(&buffer_id) + || !outline_panel.buffers.contains_key(&buffer_id); let is_folded = active_editor.read(cx).is_buffer_folded(buffer_id, cx); let status = git_store .read(cx) @@ -2813,29 +2761,28 @@ impl OutlinePanel { (is_new, is_folded, Vec::new(), entry_id, worktree, status) }) .2 - .push(excerpt_id); + .push(excerpt_range.clone()); - let outlines = match outline_panel - .excerpts - .get(&buffer_id) - .and_then(|excerpts| excerpts.get(&excerpt_id)) - { - Some(old_excerpt) => match &old_excerpt.outlines { - ExcerptOutlines::Outlines(outlines) => { - ExcerptOutlines::Outlines(outlines.clone()) + new_buffers + .entry(buffer_id) + .or_insert_with(|| { + let outlines = match outline_panel.buffers.get(&buffer_id) { + Some(old_buffer) => match &old_buffer.outlines { + OutlineState::Outlines(outlines) => { + OutlineState::Outlines(outlines.clone()) + } + OutlineState::Invalidated(_) => OutlineState::NotFetched, + OutlineState::NotFetched => OutlineState::NotFetched, + }, + None => OutlineState::NotFetched, + }; + BufferOutlines { + outlines, + excerpts: Vec::new(), } - ExcerptOutlines::Invalidated(_) => ExcerptOutlines::NotFetched, - ExcerptOutlines::NotFetched => ExcerptOutlines::NotFetched, - }, - None => ExcerptOutlines::NotFetched, - }; - new_excerpts.entry(buffer_id).or_default().insert( - excerpt_id, - Excerpt { - range: excerpt_range, - outlines, - }, - ); + 
}) + .excerpts + .push(excerpt_range); buffer_excerpts }, ) @@ -2856,7 +2803,7 @@ impl OutlinePanel { BTreeMap::>::default(); let mut worktree_excerpts = HashMap::< WorktreeId, - HashMap)>, + HashMap>)>, >::default(); let mut external_excerpts = HashMap::default(); @@ -3134,7 +3081,7 @@ impl OutlinePanel { outline_panel .update_in(cx, |outline_panel, window, cx| { outline_panel.new_entries_for_fs_update.clear(); - outline_panel.excerpts = new_excerpts; + outline_panel.buffers = new_buffers; outline_panel.collapsed_entries = new_collapsed_entries; outline_panel.unfolded_dirs = new_unfolded_dirs; outline_panel.fs_entries = new_fs_entries; @@ -3144,7 +3091,7 @@ impl OutlinePanel { // Only update cached entries if we don't have outlines to fetch // If we do have outlines to fetch, let fetch_outdated_outlines handle the update - if outline_panel.excerpt_fetch_ranges(cx).is_empty() { + if outline_panel.buffers_to_fetch().is_empty() { outline_panel.update_cached_entries(debounce, window, cx); } @@ -3192,8 +3139,15 @@ impl OutlinePanel { item_handle: new_active_item.downgrade_item(), active_editor: new_active_editor.downgrade(), }); - self.new_entries_for_fs_update - .extend(new_active_editor.read(cx).buffer().read(cx).excerpt_ids()); + self.new_entries_for_fs_update.extend( + new_active_editor + .read(cx) + .buffer() + .read(cx) + .snapshot(cx) + .excerpts() + .map(|excerpt| excerpt.context.start.buffer_id), + ); self.selected_entry.invalidate(); self.update_fs_entries(new_active_editor, None, window, cx); } @@ -3211,7 +3165,7 @@ impl OutlinePanel { self.fs_entries.clear(); self.fs_entries_depth.clear(); self.fs_children_count.clear(); - self.excerpts.clear(); + self.buffers.clear(); self.cached_entries = Vec::new(); self.selected_entry = SelectedEntry::None; self.pinned = false; @@ -3225,23 +3179,14 @@ impl OutlinePanel { window: &mut Window, cx: &mut Context, ) -> Option { - let selection = editor.update(cx, |editor, cx| { - editor - .selections - 
.newest::(&editor.display_snapshot(cx)) - .head() - }); let editor_snapshot = editor.update(cx, |editor, cx| editor.snapshot(window, cx)); let multi_buffer = editor.read(cx).buffer(); let multi_buffer_snapshot = multi_buffer.read(cx).snapshot(cx); - let (excerpt_id, buffer, _) = editor - .read(cx) - .buffer() - .read(cx) - .excerpt_containing(selection, cx)?; - let buffer_id = buffer.read(cx).remote_id(); + let anchor = editor.update(cx, |editor, _| editor.selections.newest_anchor().head()); + let selection_display_point = anchor.to_display_point(&editor_snapshot); + let (anchor, _) = multi_buffer_snapshot.anchor_to_buffer_anchor(anchor)?; - if editor.read(cx).is_buffer_folded(buffer_id, cx) { + if editor.read(cx).is_buffer_folded(anchor.buffer_id, cx) { return self .fs_entries .iter() @@ -3254,14 +3199,12 @@ impl OutlinePanel { | FsEntry::ExternalFile(FsEntryExternalFile { buffer_id: other_buffer_id, .. - }) => buffer_id == *other_buffer_id, + }) => anchor.buffer_id == *other_buffer_id, }) .cloned() .map(PanelEntry::Fs); } - let selection_display_point = selection.to_display_point(&editor_snapshot); - match &self.mode { ItemsDisplayMode::Search(search_state) => search_state .matches @@ -3298,32 +3241,31 @@ impl OutlinePanel { }) }), ItemsDisplayMode::Outline => self.outline_location( - buffer_id, - excerpt_id, + anchor, multi_buffer_snapshot, editor_snapshot, selection_display_point, + cx, ), } } fn outline_location( &self, - buffer_id: BufferId, - excerpt_id: ExcerptId, + selection_anchor: Anchor, multi_buffer_snapshot: editor::MultiBufferSnapshot, editor_snapshot: editor::EditorSnapshot, selection_display_point: DisplayPoint, + cx: &App, ) -> Option { let excerpt_outlines = self - .excerpts - .get(&buffer_id) - .and_then(|excerpts| excerpts.get(&excerpt_id)) + .buffers + .get(&selection_anchor.buffer_id) .into_iter() - .flat_map(|excerpt| excerpt.iter_outlines()) + .flat_map(|buffer| buffer.iter_outlines()) .flat_map(|outline| { let range = multi_buffer_snapshot 
- .anchor_range_in_excerpt(excerpt_id, outline.range.clone())?; + .buffer_anchor_range_to_anchor_range(outline.range.clone())?; Some(( range.start.to_display_point(&editor_snapshot) ..range.end.to_display_point(&editor_snapshot), @@ -3411,16 +3353,16 @@ impl OutlinePanel { .cloned(); let closest_container = match outline_item { - Some(outline) => PanelEntry::Outline(OutlineEntry::Outline(OutlineEntryOutline { - buffer_id, - excerpt_id, - outline, - })), + Some(outline) => PanelEntry::Outline(OutlineEntry::Outline(outline)), None => { self.cached_entries.iter().rev().find_map(|cached_entry| { match &cached_entry.entry { PanelEntry::Outline(OutlineEntry::Excerpt(excerpt)) => { - if excerpt.buffer_id == buffer_id && excerpt.id == excerpt_id { + if excerpt.context.start.buffer_id == selection_anchor.buffer_id + && let Some(buffer_snapshot) = + self.buffer_snapshot_for_id(excerpt.context.start.buffer_id, cx) + && excerpt.contains(&selection_anchor, &buffer_snapshot) + { Some(cached_entry.entry.clone()) } else { None @@ -3430,6 +3372,7 @@ impl OutlinePanel { FsEntry::ExternalFile(FsEntryExternalFile { buffer_id: file_buffer_id, excerpts: file_excerpts, + .. }) | FsEntry::File(FsEntryFile { buffer_id: file_buffer_id, @@ -3437,7 +3380,13 @@ impl OutlinePanel { .. 
}), ) => { - if file_buffer_id == &buffer_id && file_excerpts.contains(&excerpt_id) { + if *file_buffer_id == selection_anchor.buffer_id + && let Some(buffer_snapshot) = + self.buffer_snapshot_for_id(*file_buffer_id, cx) + && file_excerpts.iter().any(|excerpt| { + excerpt.contains(&selection_anchor, &buffer_snapshot) + }) + { Some(cached_entry.entry.clone()) } else { None @@ -3452,18 +3401,17 @@ impl OutlinePanel { } fn fetch_outdated_outlines(&mut self, window: &mut Window, cx: &mut Context) { - let excerpt_fetch_ranges = self.excerpt_fetch_ranges(cx); - if excerpt_fetch_ranges.is_empty() { + let buffers_to_fetch = self.buffers_to_fetch(); + if buffers_to_fetch.is_empty() { return; } let first_update = Arc::new(AtomicBool::new(true)); - for (buffer_id, (_buffer_snapshot, excerpt_ranges)) in excerpt_fetch_ranges { + for buffer_id in buffers_to_fetch { let outline_task = self.active_editor().map(|editor| { editor.update(cx, |editor, cx| editor.buffer_outline_items(buffer_id, cx)) }); - let excerpt_ids = excerpt_ranges.keys().copied().collect::>(); let first_update = first_update.clone(); self.outline_fetch_tasks.insert( @@ -3498,40 +3446,26 @@ impl OutlinePanel { Some(UPDATE_DEBOUNCE) }; - for excerpt_id in &excerpt_ids { - if let Some(excerpt) = outline_panel - .excerpts - .entry(buffer_id) - .or_default() - .get_mut(excerpt_id) + if let Some(buffer) = outline_panel.buffers.get_mut(&buffer_id) { + buffer.outlines = OutlineState::Outlines(fetched_outlines.clone()); + + if let Some(default_depth) = pending_default_depth + && let OutlineState::Outlines(outlines) = &buffer.outlines { - excerpt.outlines = - ExcerptOutlines::Outlines(fetched_outlines.clone()); - - if let Some(default_depth) = pending_default_depth - && let ExcerptOutlines::Outlines(outlines) = - &excerpt.outlines - { - outlines - .iter() - .filter(|outline| { - (default_depth == 0 - || outline.depth >= default_depth) - && outlines_with_children.contains(&( - outline.range.clone(), - outline.depth, - )) - 
}) - .for_each(|outline| { - outline_panel.collapsed_entries.insert( - CollapsedEntry::Outline( - buffer_id, - *excerpt_id, - outline.range.clone(), - ), - ); - }); - } + outlines + .iter() + .filter(|outline| { + (default_depth == 0 || outline.depth >= default_depth) + && outlines_with_children.contains(&( + outline.range.clone(), + outline.depth, + )) + }) + .for_each(|outline| { + outline_panel.collapsed_entries.insert( + CollapsedEntry::Outline(outline.range.clone()), + ); + }); } } @@ -3548,73 +3482,35 @@ impl OutlinePanel { .is_some_and(|active_editor| active_editor.read(cx).buffer().read(cx).is_singleton()) } - fn invalidate_outlines(&mut self, ids: &[ExcerptId]) { + fn invalidate_outlines(&mut self, ids: &[BufferId]) { self.outline_fetch_tasks.clear(); let mut ids = ids.iter().collect::>(); - for excerpts in self.excerpts.values_mut() { - ids.retain(|id| { - if let Some(excerpt) = excerpts.get_mut(id) { - excerpt.invalidate_outlines(); - false - } else { - true - } - }); + for (buffer_id, buffer) in self.buffers.iter_mut() { + if ids.remove(&buffer_id) { + buffer.invalidate_outlines(); + } if ids.is_empty() { break; } } } - fn excerpt_fetch_ranges( - &self, - cx: &App, - ) -> HashMap< - BufferId, - ( - BufferSnapshot, - HashMap>, - ), - > { + fn buffers_to_fetch(&self) -> HashSet { self.fs_entries .iter() - .fold(HashMap::default(), |mut excerpts_to_fetch, fs_entry| { + .fold(HashSet::default(), |mut buffers_to_fetch, fs_entry| { match fs_entry { - FsEntry::File(FsEntryFile { - buffer_id, - excerpts: file_excerpts, - .. 
- }) - | FsEntry::ExternalFile(FsEntryExternalFile { - buffer_id, - excerpts: file_excerpts, - }) => { - let excerpts = self.excerpts.get(buffer_id); - for &file_excerpt in file_excerpts { - if let Some(excerpt) = excerpts - .and_then(|excerpts| excerpts.get(&file_excerpt)) - .filter(|excerpt| excerpt.should_fetch_outlines()) - { - match excerpts_to_fetch.entry(*buffer_id) { - hash_map::Entry::Occupied(mut o) => { - o.get_mut().1.insert(file_excerpt, excerpt.range.clone()); - } - hash_map::Entry::Vacant(v) => { - if let Some(buffer_snapshot) = - self.buffer_snapshot_for_id(*buffer_id, cx) - { - v.insert((buffer_snapshot, HashMap::default())) - .1 - .insert(file_excerpt, excerpt.range.clone()); - } - } - } - } + FsEntry::File(FsEntryFile { buffer_id, .. }) + | FsEntry::ExternalFile(FsEntryExternalFile { buffer_id, .. }) => { + if let Some(buffer) = self.buffers.get(buffer_id) + && buffer.should_fetch_outlines() + { + buffers_to_fetch.insert(*buffer_id); } } FsEntry::Directory(..) => {} } - excerpts_to_fetch + buffers_to_fetch }) } @@ -4012,13 +3908,12 @@ impl OutlinePanel { } else { None }; - if let Some((buffer_id, entry_excerpts)) = excerpts_to_consider + if let Some((buffer_id, _entry_excerpts)) = excerpts_to_consider && !active_editor.read(cx).is_buffer_folded(buffer_id, cx) { - outline_panel.add_excerpt_entries( + outline_panel.add_buffer_entries( &mut generation_state, buffer_id, - entry_excerpts, depth, track_matches, is_singleton, @@ -4166,7 +4061,7 @@ impl OutlinePanel { } PanelEntry::Outline(OutlineEntry::Outline(outline_entry)) => state .match_candidates - .push(StringMatchCandidate::new(id, &outline_entry.outline.text)), + .push(StringMatchCandidate::new(id, &outline_entry.text)), PanelEntry::Outline(OutlineEntry::Excerpt(_)) => {} PanelEntry::Search(new_search_entry) => { if let Some(search_data) = new_search_entry.render_data.get() { @@ -4333,131 +4228,118 @@ impl OutlinePanel { update_cached_entries } - fn add_excerpt_entries( + fn add_buffer_entries( 
&mut self, state: &mut GenerationState, buffer_id: BufferId, - entries_to_add: &[ExcerptId], parent_depth: usize, track_matches: bool, is_singleton: bool, query: Option<&str>, cx: &mut Context, ) { - if let Some(excerpts) = self.excerpts.get(&buffer_id) { - let buffer_snapshot = self.buffer_snapshot_for_id(buffer_id, cx); + let Some(buffer) = self.buffers.get(&buffer_id) else { + return; + }; - for &excerpt_id in entries_to_add { - let Some(excerpt) = excerpts.get(&excerpt_id) else { - continue; - }; - let excerpt_depth = parent_depth + 1; - self.push_entry( - state, - track_matches, - PanelEntry::Outline(OutlineEntry::Excerpt(OutlineEntryExcerpt { - buffer_id, - id: excerpt_id, - range: excerpt.range.clone(), - })), - excerpt_depth, - cx, - ); + let buffer_snapshot = self.buffer_snapshot_for_id(buffer_id, cx); - let mut outline_base_depth = excerpt_depth + 1; - if is_singleton { - outline_base_depth = 0; - state.clear(); - } else if query.is_none() - && self - .collapsed_entries - .contains(&CollapsedEntry::Excerpt(buffer_id, excerpt_id)) - { - continue; - } + for excerpt in &buffer.excerpts { + let excerpt_depth = parent_depth + 1; + self.push_entry( + state, + track_matches, + PanelEntry::Outline(OutlineEntry::Excerpt(excerpt.clone())), + excerpt_depth, + cx, + ); + + let mut outline_base_depth = excerpt_depth + 1; + if is_singleton { + outline_base_depth = 0; + state.clear(); + } else if query.is_none() + && self + .collapsed_entries + .contains(&CollapsedEntry::Excerpt(excerpt.clone())) + { + continue; + } - let mut last_depth_at_level: Vec>> = vec![None; 10]; + let mut last_depth_at_level: Vec>> = vec![None; 10]; - let all_outlines: Vec<_> = excerpt.iter_outlines().collect(); + let all_outlines: Vec<_> = buffer.iter_outlines().collect(); - let mut outline_has_children = HashMap::default(); - let mut visible_outlines = Vec::new(); - let mut collapsed_state: Option<(usize, Range)> = None; + let mut outline_has_children = HashMap::default(); + let mut 
visible_outlines = Vec::new(); + let mut collapsed_state: Option<(usize, Range)> = None; - for (i, &outline) in all_outlines.iter().enumerate() { - let has_children = all_outlines - .get(i + 1) - .map(|next| next.depth > outline.depth) - .unwrap_or(false); + for (i, &outline) in all_outlines.iter().enumerate() { + let has_children = all_outlines + .get(i + 1) + .map(|next| next.depth > outline.depth) + .unwrap_or(false); - outline_has_children - .insert((outline.range.clone(), outline.depth), has_children); + outline_has_children.insert((outline.range.clone(), outline.depth), has_children); - let mut should_include = true; + let mut should_include = true; - if let Some((collapsed_depth, collapsed_range)) = &collapsed_state { - if outline.depth <= *collapsed_depth { + if let Some((collapsed_depth, collapsed_range)) = &collapsed_state { + if outline.depth <= *collapsed_depth { + collapsed_state = None; + } else if let Some(buffer_snapshot) = buffer_snapshot.as_ref() { + let outline_start = outline.range.start; + if outline_start + .cmp(&collapsed_range.start, buffer_snapshot) + .is_ge() + && outline_start + .cmp(&collapsed_range.end, buffer_snapshot) + .is_lt() + { + should_include = false; // Skip - inside collapsed range + } else { collapsed_state = None; - } else if let Some(buffer_snapshot) = buffer_snapshot.as_ref() { - let outline_start = outline.range.start; - if outline_start - .cmp(&collapsed_range.start, buffer_snapshot) - .is_ge() - && outline_start - .cmp(&collapsed_range.end, buffer_snapshot) - .is_lt() - { - should_include = false; // Skip - inside collapsed range - } else { - collapsed_state = None; - } } } + } - // Check if this outline itself is collapsed - if should_include - && self.collapsed_entries.contains(&CollapsedEntry::Outline( - buffer_id, - excerpt_id, - outline.range.clone(), - )) - { - collapsed_state = Some((outline.depth, outline.range.clone())); - } + // Check if this outline itself is collapsed + if should_include + && self + 
.collapsed_entries + .contains(&CollapsedEntry::Outline(outline.range.clone())) + { + collapsed_state = Some((outline.depth, outline.range.clone())); + } - if should_include { - visible_outlines.push(outline); - } + if should_include { + visible_outlines.push(outline); } + } - self.outline_children_cache - .entry(buffer_id) - .or_default() - .extend(outline_has_children); + self.outline_children_cache + .entry(buffer_id) + .or_default() + .extend(outline_has_children); - for outline in visible_outlines { - let outline_entry = OutlineEntryOutline { - buffer_id, - excerpt_id, - outline: outline.clone(), - }; + for outline in visible_outlines { + let outline_entry = outline.clone(); - if outline.depth < last_depth_at_level.len() { - last_depth_at_level[outline.depth] = Some(outline.range.clone()); - // Clear deeper levels when we go back to a shallower depth - for d in (outline.depth + 1)..last_depth_at_level.len() { - last_depth_at_level[d] = None; - } + if outline.depth < last_depth_at_level.len() { + last_depth_at_level[outline.depth] = Some(outline.range.clone()); + // Clear deeper levels when we go back to a shallower depth + for d in (outline.depth + 1)..last_depth_at_level.len() { + last_depth_at_level[d] = None; } - - self.push_entry( - state, - track_matches, - PanelEntry::Outline(OutlineEntry::Outline(outline_entry)), - outline_base_depth + outline.depth, - cx, - ); } + + self.push_entry( + state, + track_matches, + PanelEntry::Outline(OutlineEntry::Outline(outline_entry)), + outline_base_depth + outline.depth, + cx, + ); } } } @@ -4483,32 +4365,37 @@ impl OutlinePanel { FsEntry::File(file) => &file.excerpts, } .iter() - .copied() + .cloned() .collect::>(); let depth = if is_singleton { 0 } else { parent_depth + 1 }; - let new_search_matches = search_state - .matches - .iter() - .filter(|(match_range, _)| { - related_excerpts.contains(&match_range.start.excerpt_id) - || related_excerpts.contains(&match_range.end.excerpt_id) - }) - .filter(|(match_range, _)| 
{ - let editor = active_editor.read(cx); - let snapshot = editor.buffer().read(cx).snapshot(cx); - if let Some(buffer_id) = snapshot.buffer_id_for_anchor(match_range.start) - && editor.is_buffer_folded(buffer_id, cx) - { - return false; - } - if let Some(buffer_id) = snapshot.buffer_id_for_anchor(match_range.end) - && editor.is_buffer_folded(buffer_id, cx) - { + let new_search_matches = search_state.matches.iter().filter(|(match_range, _)| { + let editor = active_editor.read(cx); + let snapshot = editor.buffer().read(cx).snapshot(cx); + if !related_excerpts.iter().any(|excerpt| { + let (Some(start), Some(end)) = ( + snapshot.anchor_in_buffer(excerpt.context.start), + snapshot.anchor_in_buffer(excerpt.context.end), + ) else { return false; - } - true - }); + }; + let excerpt_range = start..end; + excerpt_range.overlaps(match_range, &snapshot) + }) { + return false; + }; + if let Some((buffer_anchor, _)) = snapshot.anchor_to_buffer_anchor(match_range.start) + && editor.is_buffer_folded(buffer_anchor.buffer_id, cx) + { + return false; + } + if let Some((buffer_anchor, _)) = snapshot.anchor_to_buffer_anchor(match_range.end) + && editor.is_buffer_folded(buffer_anchor.buffer_id, cx) + { + return false; + } + true + }); let new_search_entries = new_search_matches .map(|(match_range, search_data)| SearchEntry { @@ -4626,10 +4513,10 @@ impl OutlinePanel { + folded_dirs.entries.len().saturating_sub(1) * "/".len() } PanelEntry::Outline(OutlineEntry::Excerpt(excerpt)) => self - .excerpt_label(excerpt.buffer_id, &excerpt.range, cx) + .excerpt_label(&excerpt, cx) .map(|label| label.len()) .unwrap_or_default(), - PanelEntry::Outline(OutlineEntry::Outline(entry)) => entry.outline.text.len(), + PanelEntry::Outline(OutlineEntry::Outline(entry)) => entry.text.len(), PanelEntry::Search(search) => search .render_data .get() @@ -5212,31 +5099,21 @@ fn subscribe_for_editor_events( outline_panel.reveal_entry_for_selection(editor.clone(), window, cx); cx.notify(); } - 
EditorEvent::ExcerptsAdded { excerpts, .. } => { + EditorEvent::BuffersRemoved { removed_buffer_ids } => { outline_panel - .new_entries_for_fs_update - .extend(excerpts.iter().map(|&(excerpt_id, _)| excerpt_id)); + .buffers + .retain(|buffer_id, _| !removed_buffer_ids.contains(buffer_id)); outline_panel.update_fs_entries(editor.clone(), debounce, window, cx); } - EditorEvent::ExcerptsRemoved { ids, .. } => { - let mut ids = ids.iter().collect::>(); - for excerpts in outline_panel.excerpts.values_mut() { - excerpts.retain(|excerpt_id, _| !ids.remove(excerpt_id)); - if ids.is_empty() { - break; - } - } + EditorEvent::BufferRangesUpdated { buffer, .. } => { + outline_panel + .new_entries_for_fs_update + .insert(buffer.read(cx).remote_id()); + outline_panel.invalidate_outlines(&[buffer.read(cx).remote_id()]); outline_panel.update_fs_entries(editor.clone(), debounce, window, cx); } - EditorEvent::ExcerptsExpanded { ids } => { - outline_panel.invalidate_outlines(ids); - let update_cached_items = outline_panel.update_non_fs_items(window, cx); - if update_cached_items { - outline_panel.update_cached_entries(Some(UPDATE_DEBOUNCE), window, cx); - } - } - EditorEvent::ExcerptsEdited { ids } => { - outline_panel.invalidate_outlines(ids); + EditorEvent::BuffersEdited { buffer_ids } => { + outline_panel.invalidate_outlines(buffer_ids); let update_cached_items = outline_panel.update_non_fs_items(window, cx); if update_cached_items { outline_panel.update_cached_entries(Some(UPDATE_DEBOUNCE), window, cx); @@ -5250,29 +5127,20 @@ fn subscribe_for_editor_events( outline_panel.new_entries_for_fs_update.extend( ids.iter() .filter(|id| { - outline_panel - .excerpts - .iter() - .find_map(|(buffer_id, excerpts)| { - if excerpts.contains_key(id) { - ignore_selections_change |= outline_panel - .preserve_selection_on_buffer_fold_toggles - .remove(buffer_id); - Some(buffer_id) - } else { - None - } - }) - .map(|buffer_id| { - if editor.read(cx).is_buffer_folded(*buffer_id, cx) { - 
latest_folded_buffer_id = Some(*buffer_id); - false - } else { - latest_unfolded_buffer_id = Some(*buffer_id); - true - } - }) - .unwrap_or(true) + if outline_panel.buffers.contains_key(&id) { + ignore_selections_change |= outline_panel + .preserve_selection_on_buffer_fold_toggles + .remove(&id); + if editor.read(cx).is_buffer_folded(**id, cx) { + latest_folded_buffer_id = Some(**id); + false + } else { + latest_unfolded_buffer_id = Some(**id); + true + } + } else { + false + } }) .copied(), ); @@ -5308,10 +5176,8 @@ fn subscribe_for_editor_events( outline_panel.update_fs_entries(editor.clone(), debounce, window, cx); } EditorEvent::Reparsed(buffer_id) => { - if let Some(excerpts) = outline_panel.excerpts.get_mut(buffer_id) { - for excerpt in excerpts.values_mut() { - excerpt.invalidate_outlines(); - } + if let Some(buffer) = outline_panel.buffers.get_mut(buffer_id) { + buffer.invalidate_outlines(); } let update_cached_items = outline_panel.update_non_fs_items(window, cx); if update_cached_items { @@ -5319,10 +5185,8 @@ fn subscribe_for_editor_events( } } EditorEvent::OutlineSymbolsChanged => { - for excerpts in outline_panel.excerpts.values_mut() { - for excerpt in excerpts.values_mut() { - excerpt.invalidate_outlines(); - } + for buffer in outline_panel.buffers.values_mut() { + buffer.invalidate_outlines(); } if matches!( outline_panel.selected_entry(), @@ -6875,7 +6739,7 @@ outline: struct OutlineEntryExcerpt PanelEntry::Outline(outline_entry) => match outline_entry { OutlineEntry::Excerpt(_) => continue, OutlineEntry::Outline(outline_entry) => { - format!("outline: {}", outline_entry.outline.text) + format!("outline: {}", outline_entry.text) } }, PanelEntry::Search(search_entry) => { @@ -7243,10 +7107,9 @@ outline: fn main" PanelEntry::Outline(OutlineEntry::Outline(outline)) if panel .outline_children_cache - .get(&outline.buffer_id) + .get(&outline.range.start.buffer_id) .and_then(|children_map| { - let key = - (outline.outline.range.clone(), 
outline.outline.depth); + let key = (outline.range.clone(), outline.depth); children_map.get(&key) }) .copied() @@ -7333,9 +7196,9 @@ outline: fn main" PanelEntry::Outline(OutlineEntry::Outline(outline)) if panel .outline_children_cache - .get(&outline.buffer_id) + .get(&outline.range.start.buffer_id) .and_then(|children_map| { - let key = (outline.outline.range.clone(), outline.outline.depth); + let key = (outline.range.clone(), outline.depth); children_map.get(&key) }) .copied() @@ -7711,10 +7574,9 @@ outline: fn main" PanelEntry::Outline(OutlineEntry::Outline(outline)) if panel .outline_children_cache - .get(&outline.buffer_id) + .get(&outline.range.start.buffer_id) .and_then(|children_map| { - let key = - (outline.outline.range.clone(), outline.outline.depth); + let key = (outline.range.clone(), outline.depth); children_map.get(&key) }) .copied() @@ -8173,7 +8035,7 @@ outline: struct Foo <==== selected outline_panel.read_with(cx, |panel, _cx| { panel.selected_entry().and_then(|entry| match entry { PanelEntry::Outline(OutlineEntry::Outline(outline)) => { - Some(outline.outline.text.clone()) + Some(outline.text.clone()) } _ => None, }) diff --git a/crates/picker/src/picker.rs b/crates/picker/src/picker.rs index 1e529cd53f2d2527af8525886d11dbcddbf33a34..eba5b3096194fe8a3379efeb9b230a6004cd2e36 100644 --- a/crates/picker/src/picker.rs +++ b/crates/picker/src/picker.rs @@ -121,6 +121,9 @@ pub trait PickerDelegate: Sized + 'static { ) -> bool { true } + fn select_on_hover(&self) -> bool { + true + } // Allows binding some optional effect to when the selection changes. 
fn selected_index_changed( @@ -788,12 +791,14 @@ impl Picker { this.handle_click(ix, event.modifiers.platform, window, cx) }), ) - .on_hover(cx.listener(move |this, hovered: &bool, window, cx| { - if *hovered { - this.set_selected_index(ix, None, false, window, cx); - cx.notify(); - } - })) + .when(self.delegate.select_on_hover(), |this| { + this.on_hover(cx.listener(move |this, hovered: &bool, window, cx| { + if *hovered { + this.set_selected_index(ix, None, false, window, cx); + cx.notify(); + } + })) + }) .children(self.delegate.render_match( ix, ix == self.delegate.selected_index(), diff --git a/crates/project/Cargo.toml b/crates/project/Cargo.toml index ccffbd29f4bd03b0d4bb0a070f4229a517597468..cd037786a399eb979fd5d9053c57efe3100dd473 100644 --- a/crates/project/Cargo.toml +++ b/crates/project/Cargo.toml @@ -98,6 +98,7 @@ watch.workspace = true wax.workspace = true which.workspace = true worktree.workspace = true +zed_credentials_provider.workspace = true zeroize.workspace = true zlog.workspace = true ztracing.workspace = true diff --git a/crates/project/src/context_server_store.rs b/crates/project/src/context_server_store.rs index 395056384a79d39c978e14643166148685ea0b90..7b9fc16f10022805ea62df2f8b3df279fc96ae3d 100644 --- a/crates/project/src/context_server_store.rs +++ b/crates/project/src/context_server_store.rs @@ -684,7 +684,7 @@ impl ContextServerStore { let server_url = url.clone(); let id = id.clone(); cx.spawn(async move |_this, cx| { - let credentials_provider = cx.update(|cx| ::global(cx)); + let credentials_provider = cx.update(|cx| zed_credentials_provider::global(cx)); if let Err(err) = Self::clear_session(&credentials_provider, &server_url, &cx).await { log::warn!("{} failed to clear OAuth session on removal: {}", id, err); @@ -797,8 +797,7 @@ impl ContextServerStore { if configuration.has_static_auth_header() { None } else { - let credentials_provider = - cx.update(|cx| ::global(cx)); + let credentials_provider = cx.update(|cx| 
zed_credentials_provider::global(cx)); let http_client = cx.update(|cx| cx.http_client()); match Self::load_session(&credentials_provider, url, &cx).await { @@ -1070,7 +1069,7 @@ impl ContextServerStore { .context("Failed to start OAuth callback server")?; let http_client = cx.update(|cx| cx.http_client()); - let credentials_provider = cx.update(|cx| ::global(cx)); + let credentials_provider = cx.update(|cx| zed_credentials_provider::global(cx)); let server_url = match configuration.as_ref() { ContextServerConfiguration::Http { url, .. } => url.clone(), _ => anyhow::bail!("OAuth authentication only supported for HTTP servers"), @@ -1233,7 +1232,7 @@ impl ContextServerStore { self.stop_server(&id, cx)?; cx.spawn(async move |this, cx| { - let credentials_provider = cx.update(|cx| ::global(cx)); + let credentials_provider = cx.update(|cx| zed_credentials_provider::global(cx)); if let Err(err) = Self::clear_session(&credentials_provider, &server_url, &cx).await { log::error!("{} failed to clear OAuth session: {}", id, err); } @@ -1451,7 +1450,7 @@ async fn resolve_start_failure( // (e.g. timeout because the server rejected the token silently). Clear it // so the next start attempt can get a clean 401 and trigger the auth flow. 
if www_authenticate.is_none() { - let credentials_provider = cx.update(|cx| ::global(cx)); + let credentials_provider = cx.update(|cx| zed_credentials_provider::global(cx)); match ContextServerStore::load_session(&credentials_provider, &server_url, cx).await { Ok(Some(_)) => { log::info!("{id} start failed with a cached OAuth session present; clearing it"); diff --git a/crates/project/src/git_store.rs b/crates/project/src/git_store.rs index 36479fb80f561665e01853eba5a214eb84088361..e7e84ffe673881d898a56b64892887b9c8d6c809 100644 --- a/crates/project/src/git_store.rs +++ b/crates/project/src/git_store.rs @@ -287,6 +287,7 @@ pub struct RepositorySnapshot { pub original_repo_abs_path: Arc, pub path_style: PathStyle, pub branch: Option, + pub branch_list: Arc<[Branch]>, pub head_commit: Option, pub scan_id: u64, pub merge: MergeDetails, @@ -328,6 +329,12 @@ pub struct GraphDataResponse<'a> { pub error: Option, } +#[derive(Clone, Debug)] +enum CreateWorktreeStartPoint { + Detached, + Branched { name: String }, +} + pub struct Repository { this: WeakEntity, snapshot: RepositorySnapshot, @@ -428,7 +435,8 @@ pub enum GitGraphEvent { #[derive(Clone, Debug, PartialEq, Eq)] pub enum RepositoryEvent { StatusesChanged, - BranchChanged, + HeadChanged, + BranchListChanged, StashEntriesChanged, GitWorktreeListChanged, PendingOpsChanged { pending_ops: SumTree }, @@ -560,6 +568,10 @@ impl GitStore { client.add_entity_request_handler(Self::handle_run_hook); client.add_entity_request_handler(Self::handle_reset); client.add_entity_request_handler(Self::handle_show); + client.add_entity_request_handler(Self::handle_create_checkpoint); + client.add_entity_request_handler(Self::handle_restore_checkpoint); + client.add_entity_request_handler(Self::handle_compare_checkpoints); + client.add_entity_request_handler(Self::handle_diff_checkpoints); client.add_entity_request_handler(Self::handle_load_commit_diff); client.add_entity_request_handler(Self::handle_file_history); 
client.add_entity_request_handler(Self::handle_checkout_files); @@ -582,6 +594,7 @@ impl GitStore { client.add_entity_request_handler(Self::handle_create_worktree); client.add_entity_request_handler(Self::handle_remove_worktree); client.add_entity_request_handler(Self::handle_rename_worktree); + client.add_entity_request_handler(Self::handle_get_head_sha); } pub fn is_local(&self) -> bool { @@ -1799,6 +1812,26 @@ impl GitStore { &self.repositories } + /// Returns the original (main) repository working directory for the given worktree. + /// For normal checkouts this equals the worktree's own path; for linked + /// worktrees it points back to the original repo. + pub fn original_repo_path_for_worktree( + &self, + worktree_id: WorktreeId, + cx: &App, + ) -> Option> { + self.active_repo_id + .iter() + .chain(self.worktree_ids.keys()) + .find(|repo_id| { + self.worktree_ids + .get(repo_id) + .is_some_and(|ids| ids.contains(&worktree_id)) + }) + .and_then(|repo_id| self.repositories.get(repo_id)) + .map(|repo| repo.read(cx).snapshot().original_repo_abs_path) + } + pub fn status_for_buffer_id(&self, buffer_id: BufferId, cx: &App) -> Option { let (repo, path) = self.repository_and_path_for_buffer_id(buffer_id, cx)?; let status = repo.read(cx).snapshot.status_for_path(&path)?; @@ -2314,6 +2347,7 @@ impl GitStore { CommitOptions { amend: options.amend, signoff: options.signoff, + allow_empty: options.allow_empty, }, askpass, cx, @@ -2380,12 +2414,18 @@ impl GitStore { let repository_id = RepositoryId::from_proto(envelope.payload.repository_id); let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?; let directory = PathBuf::from(envelope.payload.directory); - let name = envelope.payload.name; + let start_point = if envelope.payload.name.is_empty() { + CreateWorktreeStartPoint::Detached + } else { + CreateWorktreeStartPoint::Branched { + name: envelope.payload.name, + } + }; let commit = envelope.payload.commit; repository_handle .update(&mut 
cx, |repository_handle, _| { - repository_handle.create_worktree(name, directory, commit) + repository_handle.create_worktree_with_start_point(start_point, directory, commit) }) .await??; @@ -2430,6 +2470,21 @@ impl GitStore { Ok(proto::Ack {}) } + async fn handle_get_head_sha( + this: Entity, + envelope: TypedEnvelope, + mut cx: AsyncApp, + ) -> Result { + let repository_id = RepositoryId::from_proto(envelope.payload.repository_id); + let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?; + + let head_sha = repository_handle + .update(&mut cx, |repository_handle, _| repository_handle.head_sha()) + .await??; + + Ok(proto::GitGetHeadShaResponse { sha: head_sha }) + } + async fn handle_get_branches( this: Entity, envelope: TypedEnvelope, @@ -2599,6 +2654,92 @@ impl GitStore { }) } + async fn handle_create_checkpoint( + this: Entity, + envelope: TypedEnvelope, + mut cx: AsyncApp, + ) -> Result { + let repository_id = RepositoryId::from_proto(envelope.payload.repository_id); + let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?; + + let checkpoint = repository_handle + .update(&mut cx, |repository, _| repository.checkpoint()) + .await??; + + Ok(proto::GitCreateCheckpointResponse { + commit_sha: checkpoint.commit_sha.as_bytes().to_vec(), + }) + } + + async fn handle_restore_checkpoint( + this: Entity, + envelope: TypedEnvelope, + mut cx: AsyncApp, + ) -> Result { + let repository_id = RepositoryId::from_proto(envelope.payload.repository_id); + let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?; + + let checkpoint = GitRepositoryCheckpoint { + commit_sha: Oid::from_bytes(&envelope.payload.commit_sha)?, + }; + + repository_handle + .update(&mut cx, |repository, _| { + repository.restore_checkpoint(checkpoint) + }) + .await??; + + Ok(proto::Ack {}) + } + + async fn handle_compare_checkpoints( + this: Entity, + envelope: TypedEnvelope, + mut cx: AsyncApp, + ) -> Result { + let 
repository_id = RepositoryId::from_proto(envelope.payload.repository_id); + let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?; + + let left = GitRepositoryCheckpoint { + commit_sha: Oid::from_bytes(&envelope.payload.left_commit_sha)?, + }; + let right = GitRepositoryCheckpoint { + commit_sha: Oid::from_bytes(&envelope.payload.right_commit_sha)?, + }; + + let equal = repository_handle + .update(&mut cx, |repository, _| { + repository.compare_checkpoints(left, right) + }) + .await??; + + Ok(proto::GitCompareCheckpointsResponse { equal }) + } + + async fn handle_diff_checkpoints( + this: Entity, + envelope: TypedEnvelope, + mut cx: AsyncApp, + ) -> Result { + let repository_id = RepositoryId::from_proto(envelope.payload.repository_id); + let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?; + + let base = GitRepositoryCheckpoint { + commit_sha: Oid::from_bytes(&envelope.payload.base_commit_sha)?, + }; + let target = GitRepositoryCheckpoint { + commit_sha: Oid::from_bytes(&envelope.payload.target_commit_sha)?, + }; + + let diff = repository_handle + .update(&mut cx, |repository, _| { + repository.diff_checkpoints(base, target) + }) + .await??; + + Ok(proto::GitDiffCheckpointsResponse { diff }) + } + async fn handle_load_commit_diff( this: Entity, envelope: TypedEnvelope, @@ -3576,6 +3717,7 @@ impl RepositorySnapshot { .unwrap_or_else(|| work_directory_abs_path.clone()), work_directory_abs_path, branch: None, + branch_list: Arc::from([]), head_commit: None, scan_id: 0, merge: Default::default(), @@ -3938,11 +4080,17 @@ impl Repository { .shared(); cx.subscribe_self(move |this, event: &RepositoryEvent, _| match event { - RepositoryEvent::BranchChanged => { + RepositoryEvent::HeadChanged | RepositoryEvent::BranchListChanged => { if this.scan_id > 1 { this.initial_graph_data.clear(); } } + RepositoryEvent::StashEntriesChanged => { + if this.scan_id > 1 { + this.initial_graph_data + .retain(|(log_source, _), 
_| *log_source != LogSource::All); + } + } _ => {} }) .detach(); @@ -4705,12 +4853,11 @@ impl Repository { .commit_oid_to_index .insert(commit_data.sha, graph_data.commit_data.len()); graph_data.commit_data.push(commit_data); - - cx.emit(RepositoryEvent::GraphEvent( - graph_data_key.clone(), - GitGraphEvent::CountUpdated(graph_data.commit_data.len()), - )); } + cx.emit(RepositoryEvent::GraphEvent( + graph_data_key.clone(), + GitGraphEvent::CountUpdated(graph_data.commit_data.len()), + )); }); match &graph_data { @@ -5375,6 +5522,7 @@ impl Repository { options: Some(proto::commit::CommitOptions { amend: options.amend, signoff: options.signoff, + allow_empty: options.allow_empty, }), askpass_id, }) @@ -5485,7 +5633,7 @@ impl Repository { log::info!("head branch after scan is {branch:?}"); let snapshot = this.update(&mut cx, |this, cx| { this.snapshot.branch = branch; - cx.emit(RepositoryEvent::BranchChanged); + cx.emit(RepositoryEvent::HeadChanged); this.snapshot.clone() })?; if let Some(updates_tx) = updates_tx { @@ -5856,36 +6004,174 @@ impl Repository { }) } + fn create_worktree_with_start_point( + &mut self, + start_point: CreateWorktreeStartPoint, + path: PathBuf, + commit: Option, + ) -> oneshot::Receiver> { + if matches!( + &start_point, + CreateWorktreeStartPoint::Branched { name } if name.is_empty() + ) { + let (sender, receiver) = oneshot::channel(); + sender + .send(Err(anyhow!("branch name cannot be empty"))) + .ok(); + return receiver; + } + + let id = self.id; + let message = match &start_point { + CreateWorktreeStartPoint::Detached => "git worktree add (detached)".into(), + CreateWorktreeStartPoint::Branched { name } => { + format!("git worktree add: {name}").into() + } + }; + + self.send_job(Some(message), move |repo, _cx| async move { + let branch_name = match start_point { + CreateWorktreeStartPoint::Detached => None, + CreateWorktreeStartPoint::Branched { name } => Some(name), + }; + let remote_name = branch_name.clone().unwrap_or_default(); + + 
match repo { + RepositoryState::Local(LocalRepositoryState { backend, .. }) => { + backend.create_worktree(branch_name, path, commit).await + } + RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => { + client + .request(proto::GitCreateWorktree { + project_id: project_id.0, + repository_id: id.to_proto(), + name: remote_name, + directory: path.to_string_lossy().to_string(), + commit, + }) + .await?; + + Ok(()) + } + } + }) + } + pub fn create_worktree( &mut self, branch_name: String, path: PathBuf, commit: Option, ) -> oneshot::Receiver> { + self.create_worktree_with_start_point( + CreateWorktreeStartPoint::Branched { name: branch_name }, + path, + commit, + ) + } + + pub fn create_worktree_detached( + &mut self, + path: PathBuf, + commit: String, + ) -> oneshot::Receiver> { + self.create_worktree_with_start_point( + CreateWorktreeStartPoint::Detached, + path, + Some(commit), + ) + } + + pub fn head_sha(&mut self) -> oneshot::Receiver>> { let id = self.id; - self.send_job( - Some(format!("git worktree add: {}", branch_name).into()), - move |repo, _cx| async move { - match repo { - RepositoryState::Local(LocalRepositoryState { backend, .. }) => { - backend.create_worktree(branch_name, path, commit).await - } - RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => { - client - .request(proto::GitCreateWorktree { - project_id: project_id.0, - repository_id: id.to_proto(), - name: branch_name, - directory: path.to_string_lossy().to_string(), - commit, - }) - .await?; + self.send_job(None, move |repo, _cx| async move { + match repo { + RepositoryState::Local(LocalRepositoryState { backend, .. 
}) => { + Ok(backend.head_sha().await) + } + RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => { + let response = client + .request(proto::GitGetHeadSha { + project_id: project_id.0, + repository_id: id.to_proto(), + }) + .await?; - Ok(()) - } + Ok(response.sha) } - }, - ) + } + }) + } + + pub fn update_ref( + &mut self, + ref_name: String, + commit: String, + ) -> oneshot::Receiver> { + self.send_job(None, move |repo, _cx| async move { + match repo { + RepositoryState::Local(LocalRepositoryState { backend, .. }) => { + backend.update_ref(ref_name, commit).await + } + RepositoryState::Remote(_) => { + anyhow::bail!("update_ref is not supported for remote repositories") + } + } + }) + } + + pub fn delete_ref(&mut self, ref_name: String) -> oneshot::Receiver> { + self.send_job(None, move |repo, _cx| async move { + match repo { + RepositoryState::Local(LocalRepositoryState { backend, .. }) => { + backend.delete_ref(ref_name).await + } + RepositoryState::Remote(_) => { + anyhow::bail!("delete_ref is not supported for remote repositories") + } + } + }) + } + + pub fn resolve_commit(&mut self, sha: String) -> oneshot::Receiver> { + self.send_job(None, move |repo, _cx| async move { + match repo { + RepositoryState::Local(LocalRepositoryState { backend, .. }) => { + let results = backend.revparse_batch(vec![sha]).await?; + Ok(results.into_iter().next().flatten().is_some()) + } + RepositoryState::Remote(_) => { + anyhow::bail!("resolve_commit is not supported for remote repositories") + } + } + }) + } + + pub fn repair_worktrees(&mut self) -> oneshot::Receiver> { + self.send_job(None, move |repo, _cx| async move { + match repo { + RepositoryState::Local(LocalRepositoryState { backend, .. 
}) => { + backend.repair_worktrees().await + } + RepositoryState::Remote(_) => { + anyhow::bail!("repair_worktrees is not supported for remote repositories") + } + } + }) + } + + pub fn commit_exists(&mut self, sha: String) -> oneshot::Receiver> { + self.send_job(None, move |repo, _cx| async move { + match repo { + RepositoryState::Local(LocalRepositoryState { backend, .. }) => { + let results = backend.revparse_batch(vec![sha]).await?; + Ok(results.into_iter().next().flatten().is_some()) + } + RepositoryState::Remote(_) => { + anyhow::bail!("commit_exists is not supported for remote repositories") + } + } + }) } pub fn remove_worktree(&mut self, path: PathBuf, force: bool) -> oneshot::Receiver> { @@ -6210,12 +6496,24 @@ impl Repository { } pub fn checkpoint(&mut self) -> oneshot::Receiver> { - self.send_job(None, |repo, _cx| async move { + let id = self.id; + self.send_job(None, move |repo, _cx| async move { match repo { RepositoryState::Local(LocalRepositoryState { backend, .. }) => { backend.checkpoint().await } - RepositoryState::Remote(..) => anyhow::bail!("not implemented yet"), + RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => { + let response = client + .request(proto::GitCreateCheckpoint { + project_id: project_id.0, + repository_id: id.to_proto(), + }) + .await?; + + Ok(GitRepositoryCheckpoint { + commit_sha: Oid::from_bytes(&response.commit_sha)?, + }) + } } }) } @@ -6224,12 +6522,22 @@ impl Repository { &mut self, checkpoint: GitRepositoryCheckpoint, ) -> oneshot::Receiver> { + let id = self.id; self.send_job(None, move |repo, _cx| async move { match repo { RepositoryState::Local(LocalRepositoryState { backend, .. }) => { backend.restore_checkpoint(checkpoint).await } - RepositoryState::Remote { .. 
} => anyhow::bail!("not implemented yet"), + RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => { + client + .request(proto::GitRestoreCheckpoint { + project_id: project_id.0, + repository_id: id.to_proto(), + commit_sha: checkpoint.commit_sha.as_bytes().to_vec(), + }) + .await?; + Ok(()) + } } }) } @@ -6249,7 +6557,7 @@ impl Repository { .as_ref() .map(proto_to_commit_details); if self.snapshot.branch != new_branch || self.snapshot.head_commit != new_head_commit { - cx.emit(RepositoryEvent::BranchChanged) + cx.emit(RepositoryEvent::HeadChanged) } self.snapshot.branch = new_branch; self.snapshot.head_commit = new_head_commit; @@ -6323,12 +6631,23 @@ impl Repository { left: GitRepositoryCheckpoint, right: GitRepositoryCheckpoint, ) -> oneshot::Receiver> { + let id = self.id; self.send_job(None, move |repo, _cx| async move { match repo { RepositoryState::Local(LocalRepositoryState { backend, .. }) => { backend.compare_checkpoints(left, right).await } - RepositoryState::Remote { .. } => anyhow::bail!("not implemented yet"), + RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => { + let response = client + .request(proto::GitCompareCheckpoints { + project_id: project_id.0, + repository_id: id.to_proto(), + left_commit_sha: left.commit_sha.as_bytes().to_vec(), + right_commit_sha: right.commit_sha.as_bytes().to_vec(), + }) + .await?; + Ok(response.equal) + } } }) } @@ -6338,6 +6657,7 @@ impl Repository { base_checkpoint: GitRepositoryCheckpoint, target_checkpoint: GitRepositoryCheckpoint, ) -> oneshot::Receiver> { + let id = self.id; self.send_job(None, move |repo, _cx| async move { match repo { RepositoryState::Local(LocalRepositoryState { backend, .. }) => { @@ -6345,7 +6665,17 @@ impl Repository { .diff_checkpoints(base_checkpoint, target_checkpoint) .await } - RepositoryState::Remote { .. 
} => anyhow::bail!("not implemented yet"), + RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => { + let response = client + .request(proto::GitDiffCheckpoints { + project_id: project_id.0, + repository_id: id.to_proto(), + base_commit_sha: base_checkpoint.commit_sha.as_bytes().to_vec(), + target_commit_sha: target_checkpoint.commit_sha.as_bytes().to_vec(), + }) + .await?; + Ok(response.diff) + } } }) } @@ -6428,7 +6758,7 @@ impl Repository { let state = RepositoryState::Local(state); let mut jobs = VecDeque::new(); loop { - while let Ok(Some(next_job)) = job_rx.try_next() { + while let Ok(next_job) = job_rx.try_recv() { jobs.push_back(next_job); } @@ -6464,7 +6794,7 @@ impl Repository { let state = RepositoryState::Remote(state); let mut jobs = VecDeque::new(); loop { - while let Ok(Some(next_job)) = job_rx.try_next() { + while let Ok(next_job) = job_rx.try_recv() { jobs.push_back(next_job); } @@ -7165,7 +7495,8 @@ async fn compute_snapshot( } }) .await?; - let branch = branches.into_iter().find(|branch| branch.is_head); + let branch = branches.iter().find(|branch| branch.is_head).cloned(); + let branch_list: Arc<[Branch]> = branches.into(); let linked_worktrees: Arc<[GitWorktree]> = all_worktrees .into_iter() @@ -7188,14 +7519,16 @@ async fn compute_snapshot( .await?; let snapshot = this.update(cx, |this, cx| { - let branch_changed = + let head_changed = branch != this.snapshot.branch || head_commit != this.snapshot.head_commit; + let branch_list_changed = *branch_list != *this.snapshot.branch_list; let worktrees_changed = *linked_worktrees != *this.snapshot.linked_worktrees; this.snapshot = RepositorySnapshot { id, work_directory_abs_path, branch, + branch_list: branch_list.clone(), head_commit, remote_origin_url, remote_upstream_url, @@ -7204,8 +7537,12 @@ async fn compute_snapshot( ..prev_snapshot }; - if branch_changed { - cx.emit(RepositoryEvent::BranchChanged); + if head_changed { + cx.emit(RepositoryEvent::HeadChanged); + } + + if 
branch_list_changed { + cx.emit(RepositoryEvent::BranchListChanged); } if worktrees_changed { diff --git a/crates/project/src/git_store/branch_diff.rs b/crates/project/src/git_store/branch_diff.rs index 3b8324fce8ffea7049838aeac09e831463dbd34e..dc7c8bf647585d9fcf1d5f92e0e976f86939a781 100644 --- a/crates/project/src/git_store/branch_diff.rs +++ b/crates/project/src/git_store/branch_diff.rs @@ -70,7 +70,7 @@ impl BranchDiff { } GitStoreEvent::RepositoryUpdated( event_repo_id, - RepositoryEvent::StatusesChanged | RepositoryEvent::BranchChanged, + RepositoryEvent::StatusesChanged | RepositoryEvent::HeadChanged, _, ) => this .repo diff --git a/crates/project/src/lsp_store.rs b/crates/project/src/lsp_store.rs index 286d3a85f86173bff5d17d8d7c86d26464a04714..2f579f5a724db143bbd4b0f9853a217bd6b14655 100644 --- a/crates/project/src/lsp_store.rs +++ b/crates/project/src/lsp_store.rs @@ -4414,7 +4414,8 @@ impl LspStore { } worktree::Event::UpdatedGitRepositories(_) | worktree::Event::DeletedEntry(_) - | worktree::Event::Deleted => {} + | worktree::Event::Deleted + | worktree::Event::UpdatedRootRepoCommonDir => {} }) .detach() } diff --git a/crates/project/src/lsp_store/semantic_tokens.rs b/crates/project/src/lsp_store/semantic_tokens.rs index 7865e8f20ca0e4dbc9d06c2ffd808fe4090634ed..0f01c6350ece89569535dca571c28597ff77384b 100644 --- a/crates/project/src/lsp_store/semantic_tokens.rs +++ b/crates/project/src/lsp_store/semantic_tokens.rs @@ -585,7 +585,7 @@ async fn raw_to_buffer_semantic_tokens( } Some(BufferSemanticToken { - range: buffer_snapshot.anchor_range_around(start..end), + range: buffer_snapshot.anchor_range_inside(start..end), token_type: token.token_type, token_modifiers: token.token_modifiers, }) diff --git a/crates/project/src/manifest_tree.rs b/crates/project/src/manifest_tree.rs index 1ae5b0e809f3803c3f8858afb065637ba0a0f256..fb1b7e96e4a20370493e0837360a28583ffbbfc0 100644 --- a/crates/project/src/manifest_tree.rs +++ b/crates/project/src/manifest_tree.rs @@ 
-59,7 +59,7 @@ impl WorktreeRoots { let path = TriePath::from(entry.path.as_ref()); this.roots.remove(&path); } - WorktreeEvent::Deleted => {} + WorktreeEvent::Deleted | WorktreeEvent::UpdatedRootRepoCommonDir => {} } }), }) diff --git a/crates/project/src/project.rs b/crates/project/src/project.rs index 96b82a16930543028b7588a843433c6a70bf34e6..e8eef386e3b31988f55546d289de7de8cd345828 100644 --- a/crates/project/src/project.rs +++ b/crates/project/src/project.rs @@ -1032,6 +1032,8 @@ impl DirectoryLister { } } +pub const CURRENT_PROJECT_FEATURES: &[&str] = &["new-style-anchors"]; + #[cfg(feature = "test-support")] pub const DEFAULT_COMPLETION_CONTEXT: CompletionContext = CompletionContext { trigger_kind: lsp::CompletionTriggerKind::INVOKED, @@ -1228,12 +1230,23 @@ impl Project { ) }); + let git_store = cx.new(|cx| { + GitStore::local( + &worktree_store, + buffer_store.clone(), + environment.clone(), + fs.clone(), + cx, + ) + }); + let task_store = cx.new(|cx| { TaskStore::local( buffer_store.downgrade(), worktree_store.clone(), toolchain_store.read(cx).as_language_toolchain_store(), environment.clone(), + git_store.clone(), cx, ) }); @@ -1269,16 +1282,6 @@ impl Project { ) }); - let git_store = cx.new(|cx| { - GitStore::local( - &worktree_store, - buffer_store.clone(), - environment.clone(), - fs.clone(), - cx, - ) - }); - let agent_server_store = cx.new(|cx| { AgentServerStore::local( node.clone(), @@ -1413,30 +1416,6 @@ impl Project { ) }); - let task_store = cx.new(|cx| { - TaskStore::remote( - buffer_store.downgrade(), - worktree_store.clone(), - toolchain_store.read(cx).as_language_toolchain_store(), - remote.read(cx).proto_client(), - REMOTE_SERVER_PROJECT_ID, - cx, - ) - }); - - let settings_observer = cx.new(|cx| { - SettingsObserver::new_remote( - fs.clone(), - worktree_store.clone(), - task_store.clone(), - Some(remote_proto.clone()), - false, - cx, - ) - }); - cx.subscribe(&settings_observer, Self::on_settings_observer_event) - .detach(); - let 
context_server_store = cx.new(|cx| { ContextServerStore::remote( rpc::proto::REMOTE_SERVER_PROJECT_ID, @@ -1501,6 +1480,31 @@ impl Project { ) }); + let task_store = cx.new(|cx| { + TaskStore::remote( + buffer_store.downgrade(), + worktree_store.clone(), + toolchain_store.read(cx).as_language_toolchain_store(), + remote.read(cx).proto_client(), + REMOTE_SERVER_PROJECT_ID, + git_store.clone(), + cx, + ) + }); + + let settings_observer = cx.new(|cx| { + SettingsObserver::new_remote( + fs.clone(), + worktree_store.clone(), + task_store.clone(), + Some(remote_proto.clone()), + false, + cx, + ) + }); + cx.subscribe(&settings_observer, Self::on_settings_observer_event) + .detach(); + let agent_server_store = cx.new(|_| { AgentServerStore::remote( REMOTE_SERVER_PROJECT_ID, @@ -1644,6 +1648,10 @@ impl Project { project_id: remote_id, committer_email: committer.email, committer_name: committer.name, + features: CURRENT_PROJECT_FEATURES + .iter() + .map(|s| s.to_string()) + .collect(), }) .await?; Self::from_join_project_response( @@ -1726,6 +1734,17 @@ impl Project { ) }); + let git_store = cx.new(|cx| { + GitStore::remote( + // In this remote case we pass None for the environment + &worktree_store, + buffer_store.clone(), + client.clone().into(), + remote_id, + cx, + ) + }); + let task_store = cx.new(|cx| { if run_tasks { TaskStore::remote( @@ -1734,6 +1753,7 @@ impl Project { Arc::new(EmptyToolchainStore), client.clone().into(), remote_id, + git_store.clone(), cx, ) } else { @@ -1752,17 +1772,6 @@ impl Project { ) }); - let git_store = cx.new(|cx| { - GitStore::remote( - // In this remote case we pass None for the environment - &worktree_store, - buffer_store.clone(), - client.clone().into(), - remote_id, - cx, - ) - }); - let agent_server_store = cx.new(|_cx| AgentServerStore::collab()); let replica_id = ReplicaId::new(response.payload.replica_id as u16); @@ -2340,6 +2349,22 @@ impl Project { .find(|tree| tree.read(cx).root_name() == root_name) } + pub fn 
project_group_key(&self, cx: &App) -> ProjectGroupKey { + let roots = self + .visible_worktrees(cx) + .map(|worktree| { + let snapshot = worktree.read(cx).snapshot(); + snapshot + .root_repo_common_dir() + .and_then(|dir| Some(dir.parent()?.to_path_buf())) + .unwrap_or(snapshot.abs_path().to_path_buf()) + }) + .collect::>(); + let host = self.remote_connection_options(cx); + let path_list = PathList::new(&roots); + ProjectGroupKey::new(host, path_list) + } + #[inline] pub fn worktree_root_names<'a>(&'a self, cx: &'a App) -> impl Iterator { self.visible_worktrees(cx) @@ -4738,6 +4763,19 @@ impl Project { }); } + pub fn remove_worktree_for_main_worktree_path( + &mut self, + path: impl AsRef, + cx: &mut Context, + ) { + let path = path.as_ref(); + self.worktree_store.update(cx, |worktree_store, cx| { + if let Some(worktree) = worktree_store.worktree_for_main_worktree_path(path, cx) { + worktree_store.remove_worktree(worktree.read(cx).id(), cx); + } + }); + } + fn add_worktree(&mut self, worktree: &Entity, cx: &mut Context) { self.worktree_store.update(cx, |worktree_store, cx| { worktree_store.add(worktree, cx); @@ -6009,6 +6047,80 @@ impl Project { } } +/// Identifies a project group by a set of paths the workspaces in this group +/// have. +/// +/// Paths are mapped to their main worktree path first so we can group +/// workspaces by main repos. +#[derive(PartialEq, Eq, Hash, Clone, Debug)] +pub struct ProjectGroupKey { + paths: PathList, + host: Option, +} + +impl ProjectGroupKey { + /// Creates a new `ProjectGroupKey` with the given path list. + /// + /// The path list should point to the git main worktree paths for a project. 
+ pub fn new(host: Option, paths: PathList) -> Self { + Self { paths, host } + } + + pub fn display_name(&self) -> SharedString { + let mut names = Vec::with_capacity(self.paths.paths().len()); + for abs_path in self.paths.paths() { + if let Some(name) = abs_path.file_name() { + names.push(name.to_string_lossy().to_string()); + } + } + if names.is_empty() { + // TODO: Can we do something better in this case? + "Empty Workspace".into() + } else { + names.join(", ").into() + } + } + + pub fn path_list(&self) -> &PathList { + &self.paths + } + + pub fn display_name_from_suffixes( + &self, + path_detail_map: &std::collections::HashMap, + ) -> SharedString { + let mut names = Vec::with_capacity(self.paths.paths().len()); + for abs_path in self.paths.paths() { + let detail = path_detail_map.get(abs_path).copied().unwrap_or(0); + let suffix = path_suffix(abs_path, detail); + if !suffix.is_empty() { + names.push(suffix); + } + } + if names.is_empty() { + "Empty Workspace".into() + } else { + names.join(", ").into() + } + } + + pub fn host(&self) -> Option { + self.host.clone() + } +} + +pub fn path_suffix(path: &Path, detail: usize) -> String { + let components: Vec<_> = path + .components() + .filter_map(|component| match component { + std::path::Component::Normal(s) => Some(s.to_string_lossy()), + _ => None, + }) + .collect(); + let start = components.len().saturating_sub(detail + 1); + components[start..].join("/") +} + pub struct PathMatchCandidateSet { pub snapshot: Snapshot, pub include_ignored: bool, diff --git a/crates/project/src/search.rs b/crates/project/src/search.rs index 3a554eb3da1557849e18846b09a7787ab939f46d..cd4702d04863c2fc3026700b2d6653e1db24dbff 100644 --- a/crates/project/src/search.rs +++ b/crates/project/src/search.rs @@ -620,4 +620,56 @@ impl SearchQuery { Self::Text { .. 
} => None, } } + + pub fn search_str(&self, text: &str) -> Vec> { + if self.as_str().is_empty() { + return Vec::new(); + } + + let is_word_char = |c: char| c.is_alphanumeric() || c == '_'; + + let mut matches = Vec::new(); + match self { + Self::Text { + search, whole_word, .. + } => { + for mat in search.find_iter(text.as_bytes()) { + if *whole_word { + let prev_char = text[..mat.start()].chars().last(); + let next_char = text[mat.end()..].chars().next(); + if prev_char.is_some_and(&is_word_char) + || next_char.is_some_and(&is_word_char) + { + continue; + } + } + matches.push(mat.start()..mat.end()); + } + } + Self::Regex { + regex, + multiline, + one_match_per_line, + .. + } => { + if *multiline { + for mat in regex.find_iter(text).flatten() { + matches.push(mat.start()..mat.end()); + } + } else { + let mut line_offset = 0; + for line in text.split('\n') { + for mat in regex.find_iter(line).flatten() { + matches.push((line_offset + mat.start())..(line_offset + mat.end())); + if *one_match_per_line { + break; + } + } + line_offset += line.len() + 1; + } + } + } + } + matches + } } diff --git a/crates/project/src/task_inventory.rs b/crates/project/src/task_inventory.rs index 46999b2b7024c6035732b64de30a3e64cd65460c..663380181015d52c9a91f1a23c7bd0d48d8ac57d 100644 --- a/crates/project/src/task_inventory.rs +++ b/crates/project/src/task_inventory.rs @@ -21,14 +21,14 @@ use lsp::{LanguageServerId, LanguageServerName}; use paths::{debug_task_file_name, task_file_name}; use settings::{InvalidSettingsError, parse_json_with_comments}; use task::{ - DebugScenario, ResolvedTask, SharedTaskContext, TaskContext, TaskId, TaskTemplate, + DebugScenario, ResolvedTask, SharedTaskContext, TaskContext, TaskHook, TaskId, TaskTemplate, TaskTemplates, TaskVariables, VariableName, }; use text::{BufferId, Point, ToPoint}; use util::{NumericPrefixWithSuffix, ResultExt as _, post_inc, rel_path::RelPath}; use worktree::WorktreeId; -use crate::{task_store::TaskSettingsLocation, 
worktree_store::WorktreeStore}; +use crate::{git_store::GitStore, task_store::TaskSettingsLocation, worktree_store::WorktreeStore}; #[derive(Clone, Debug, Default)] pub struct DebugScenarioContext { @@ -644,6 +644,19 @@ impl Inventory { self.last_scheduled_tasks.retain(|(_, task)| &task.id != id); } + /// Returns all task templates (worktree and global) that have at least one + /// hook in the provided set. + pub fn templates_with_hooks( + &self, + hooks: &HashSet, + worktree: WorktreeId, + ) -> Vec<(TaskSourceKind, TaskTemplate)> { + self.worktree_templates_from_settings(worktree) + .chain(self.global_templates_from_settings()) + .filter(|(_, template)| !template.hooks.is_disjoint(hooks)) + .collect() + } + fn global_templates_from_settings( &self, ) -> impl '_ + Iterator { @@ -918,11 +931,15 @@ fn task_variables_preference(task: &ResolvedTask) -> Reverse { /// Applied as a base for every custom [`ContextProvider`] unless explicitly oped out. pub struct BasicContextProvider { worktree_store: Entity, + git_store: Entity, } impl BasicContextProvider { - pub fn new(worktree_store: Entity) -> Self { - Self { worktree_store } + pub fn new(worktree_store: Entity, git_store: Entity) -> Self { + Self { + worktree_store, + git_store, + } } } @@ -1002,6 +1019,19 @@ impl ContextProvider for BasicContextProvider { } } + if let Some(worktree_id) = location.buffer.read(cx).file().map(|f| f.worktree_id(cx)) { + if let Some(path) = self + .git_store + .read(cx) + .original_repo_path_for_worktree(worktree_id, cx) + { + task_variables.insert( + VariableName::MainGitWorktree, + path.to_string_lossy().into_owned(), + ); + } + } + if let Some(current_file) = current_file { let path = current_file.abs_path(cx); if let Some(filename) = path.file_name().and_then(|f| f.to_str()) { diff --git a/crates/project/src/task_store.rs b/crates/project/src/task_store.rs index 7aec460aeb9917eb9c1c58668ece4a10033a7ac9..5b91a3a8901d63e7311fb7ec81a69767b68e02d4 100644 --- 
a/crates/project/src/task_store.rs +++ b/crates/project/src/task_store.rs @@ -19,7 +19,7 @@ use util::ResultExt; use crate::{ BasicContextProvider, Inventory, ProjectEnvironment, buffer_store::BufferStore, - worktree_store::WorktreeStore, + git_store::GitStore, worktree_store::WorktreeStore, }; // platform-dependent warning @@ -33,6 +33,7 @@ pub struct StoreState { task_inventory: Entity, buffer_store: WeakEntity, worktree_store: Entity, + git_store: Entity, toolchain_store: Arc, } @@ -163,6 +164,7 @@ impl TaskStore { worktree_store: Entity, toolchain_store: Arc, environment: Entity, + git_store: Entity, cx: &mut Context, ) -> Self { Self::Functional(StoreState { @@ -172,6 +174,7 @@ impl TaskStore { }, task_inventory: Inventory::new(cx), buffer_store, + git_store, toolchain_store, worktree_store, }) @@ -183,6 +186,7 @@ impl TaskStore { toolchain_store: Arc, upstream_client: AnyProtoClient, project_id: u64, + git_store: Entity, cx: &mut Context, ) -> Self { Self::Functional(StoreState { @@ -192,6 +196,7 @@ impl TaskStore { }, task_inventory: Inventory::new(cx), buffer_store, + git_store, toolchain_store, worktree_store, }) @@ -207,6 +212,7 @@ impl TaskStore { TaskStore::Functional(state) => match &state.mode { StoreMode::Local { environment, .. 
} => local_task_context_for_location( state.worktree_store.clone(), + state.git_store.clone(), state.toolchain_store.clone(), environment.clone(), captured_variables, @@ -220,6 +226,7 @@ impl TaskStore { *project_id, upstream_client.clone(), state.worktree_store.clone(), + state.git_store.clone(), captured_variables, location, state.toolchain_store.clone(), @@ -302,6 +309,7 @@ impl TaskStore { fn local_task_context_for_location( worktree_store: Entity, + git_store: Entity, toolchain_store: Arc, environment: Entity, captured_variables: TaskVariables, @@ -329,7 +337,7 @@ fn local_task_context_for_location( worktree_store.clone(), location, project_env.clone(), - BasicContextProvider::new(worktree_store), + BasicContextProvider::new(worktree_store, git_store), toolchain_store, cx, ) @@ -351,6 +359,7 @@ fn remote_task_context_for_location( project_id: u64, upstream_client: AnyProtoClient, worktree_store: Entity, + git_store: Entity, captured_variables: TaskVariables, location: Location, toolchain_store: Arc, @@ -362,7 +371,7 @@ fn remote_task_context_for_location( .update(|cx| { let worktree_root = worktree_root(&worktree_store, &location, cx); - BasicContextProvider::new(worktree_store).build_context( + BasicContextProvider::new(worktree_store, git_store).build_context( &TaskVariables::default(), ContextLocation { fs: None, diff --git a/crates/project/src/worktree_store.rs b/crates/project/src/worktree_store.rs index 92f7db453a81c6224455002b7811f2e6945f2a82..7ca721ddb50c3f216ed630665e547b60ce4d52bf 100644 --- a/crates/project/src/worktree_store.rs +++ b/crates/project/src/worktree_store.rs @@ -812,6 +812,7 @@ impl WorktreeStore { // The worktree root itself has been deleted (for single-file worktrees) // The worktree will be removed via the observe_release callback } + worktree::Event::UpdatedRootRepoCommonDir => {} } }) .detach(); @@ -849,6 +850,21 @@ impl WorktreeStore { self.send_project_updates(cx); } + pub fn worktree_for_main_worktree_path( + &self, + path: 
&Path, + cx: &App, + ) -> Option> { + self.visible_worktrees(cx).find(|worktree| { + let worktree = worktree.read(cx); + if let Some(common_dir) = worktree.root_repo_common_dir() { + common_dir.parent() == Some(path) + } else { + worktree.abs_path().as_ref() == path + } + }) + } + pub fn set_worktrees_reordered(&mut self, worktrees_reordered: bool) { self.worktrees_reordered = worktrees_reordered; } diff --git a/crates/project/tests/integration/debugger.rs b/crates/project/tests/integration/debugger.rs index 6cdc126d9750ae38f36e27879e5e9b635295015c..61bba78c74baec2e48b172043b3b504ccf32dba9 100644 --- a/crates/project/tests/integration/debugger.rs +++ b/crates/project/tests/integration/debugger.rs @@ -23,6 +23,7 @@ mod go_locator { show_summary: true, show_command: true, save: SaveStrategy::default(), + hooks: Default::default(), }; let scenario = locator @@ -51,6 +52,7 @@ mod go_locator { show_summary: true, show_command: true, save: SaveStrategy::default(), + hooks: Default::default(), }; let scenario = locator @@ -190,6 +192,7 @@ mod go_locator { show_summary: true, show_command: true, save: SaveStrategy::default(), + hooks: Default::default(), }; let scenario = locator @@ -225,6 +228,7 @@ mod python_locator { show_summary: false, show_command: false, save: task::SaveStrategy::default(), + hooks: Default::default(), }; let expected_scenario = DebugScenario { diff --git a/crates/project/tests/integration/lsp_store.rs b/crates/project/tests/integration/lsp_store.rs index 91d5ca1697255a07c0bc9bb37869d87773792297..7d266ff1365485032458d6de033b57f106602869 100644 --- a/crates/project/tests/integration/lsp_store.rs +++ b/crates/project/tests/integration/lsp_store.rs @@ -43,7 +43,7 @@ fn test_multi_len_chars_normalization() { let mut label = CodeLabel::new( "myElˇ (parameter) myElˇ: {\n foo: string;\n}".to_string(), 0..6, - vec![(0..6, HighlightId(1))], + vec![(0..6, HighlightId::new(1))], ); ensure_uniform_list_compatible_label(&mut label); assert_eq!( @@ -51,7 +51,7 @@ 
fn test_multi_len_chars_normalization() { CodeLabel::new( "myElˇ (parameter) myElˇ: { foo: string; }".to_string(), 0..6, - vec![(0..6, HighlightId(1))], + vec![(0..6, HighlightId::new(1))], ) ); } diff --git a/crates/project/tests/integration/project_tests.rs b/crates/project/tests/integration/project_tests.rs index 8603a904acd2c0cd52fcdc9d102be0f2efeb0636..d6c2ce37c9e60e17bd43c3f6c3ad10cde52b4bec 100644 --- a/crates/project/tests/integration/project_tests.rs +++ b/crates/project/tests/integration/project_tests.rs @@ -1771,7 +1771,7 @@ async fn test_managing_language_servers(cx: &mut gpui::TestAppContext) { DiagnosticSet::from_sorted_entries( vec![DiagnosticEntry { diagnostic: Default::default(), - range: Anchor::MIN..Anchor::MAX, + range: Anchor::min_max_range_for_buffer(buffer.remote_id()), }], &buffer.snapshot(), ), @@ -4448,7 +4448,7 @@ async fn test_definition(cx: &mut gpui::TestAppContext) { // Assert no new language server started cx.executor().run_until_parked(); - assert!(fake_servers.try_next().is_err()); + assert!(fake_servers.try_recv().is_err()); assert_eq!(definitions.len(), 1); let definition = definitions.pop().unwrap(); @@ -8525,9 +8525,10 @@ async fn test_unstaged_diff_for_buffer(cx: &mut gpui::TestAppContext) { unstaged_diff.update(cx, |unstaged_diff, cx| { let snapshot = buffer.read(cx).snapshot(); assert_hunks( - unstaged_diff - .snapshot(cx) - .hunks_intersecting_range(Anchor::MIN..Anchor::MAX, &snapshot), + unstaged_diff.snapshot(cx).hunks_intersecting_range( + Anchor::min_max_range_for_buffer(snapshot.remote_id()), + &snapshot, + ), &snapshot, &unstaged_diff.base_text(cx).text(), &[( @@ -8616,8 +8617,10 @@ async fn test_uncommitted_diff_for_buffer(cx: &mut gpui::TestAppContext) { diff_1.update(cx, |diff, cx| { let snapshot = buffer_1.read(cx).snapshot(); assert_hunks( - diff.snapshot(cx) - .hunks_intersecting_range(Anchor::MIN..Anchor::MAX, &snapshot), + diff.snapshot(cx).hunks_intersecting_range( + 
Anchor::min_max_range_for_buffer(snapshot.remote_id()), + &snapshot, + ), &snapshot, &diff.base_text_string(cx).unwrap(), &[ @@ -8658,8 +8661,10 @@ async fn test_uncommitted_diff_for_buffer(cx: &mut gpui::TestAppContext) { diff_1.update(cx, |diff, cx| { let snapshot = buffer_1.read(cx).snapshot(); assert_hunks( - diff.snapshot(cx) - .hunks_intersecting_range(Anchor::MIN..Anchor::MAX, &snapshot), + diff.snapshot(cx).hunks_intersecting_range( + Anchor::min_max_range_for_buffer(snapshot.remote_id()), + &snapshot, + ), &snapshot, &diff.base_text(cx).text(), &[( @@ -8688,8 +8693,10 @@ async fn test_uncommitted_diff_for_buffer(cx: &mut gpui::TestAppContext) { diff_2.update(cx, |diff, cx| { let snapshot = buffer_2.read(cx).snapshot(); assert_hunks( - diff.snapshot(cx) - .hunks_intersecting_range(Anchor::MIN..Anchor::MAX, &snapshot), + diff.snapshot(cx).hunks_intersecting_range( + Anchor::min_max_range_for_buffer(snapshot.remote_id()), + &snapshot, + ), &snapshot, &diff.base_text_string(cx).unwrap(), &[( @@ -8710,8 +8717,10 @@ async fn test_uncommitted_diff_for_buffer(cx: &mut gpui::TestAppContext) { diff_2.update(cx, |diff, cx| { let snapshot = buffer_2.read(cx).snapshot(); assert_hunks( - diff.snapshot(cx) - .hunks_intersecting_range(Anchor::MIN..Anchor::MAX, &snapshot), + diff.snapshot(cx).hunks_intersecting_range( + Anchor::min_max_range_for_buffer(snapshot.remote_id()), + &snapshot, + ), &snapshot, &diff.base_text_string(cx).unwrap(), &[( @@ -11152,7 +11161,7 @@ async fn test_odd_events_for_ignored_dirs( assert_eq!( repository_updates.lock().drain(..).collect::>(), vec![ - RepositoryEvent::BranchChanged, + RepositoryEvent::HeadChanged, RepositoryEvent::StatusesChanged, RepositoryEvent::StatusesChanged, ], diff --git a/crates/project_panel/src/project_panel.rs b/crates/project_panel/src/project_panel.rs index e9062364fc73ed6e266e3f8904be51eaaf5b6535..c2f1bb7131ad31ea75aee84bad17b7971d489a09 100644 --- a/crates/project_panel/src/project_panel.rs +++ 
b/crates/project_panel/src/project_panel.rs @@ -7126,7 +7126,7 @@ impl Render for ProjectPanel { .workspace .update(cx, |workspace, cx| { workspace.open_workspace_for_paths( - OpenMode::Replace, + OpenMode::Activate, external_paths.paths().to_owned(), window, cx, diff --git a/crates/project_symbols/src/project_symbols.rs b/crates/project_symbols/src/project_symbols.rs index 84b92f3eaa4f0216b881526b3aac42f8980ffe78..931e332d93d869bc31909643190d5b35f32409dc 100644 --- a/crates/project_symbols/src/project_symbols.rs +++ b/crates/project_symbols/src/project_symbols.rs @@ -140,11 +140,20 @@ impl PickerDelegate for ProjectSymbolsDelegate { ); editor.update(cx, |editor, cx| { + let multibuffer_snapshot = editor.buffer().read(cx).snapshot(cx); + let Some(buffer_snapshot) = multibuffer_snapshot.as_singleton() else { + return; + }; + let text_anchor = buffer_snapshot.anchor_before(position); + let Some(anchor) = multibuffer_snapshot.anchor_in_buffer(text_anchor) + else { + return; + }; editor.change_selections( SelectionEffects::scroll(Autoscroll::center()), window, cx, - |s| s.select_ranges([position..position]), + |s| s.select_ranges([anchor..anchor]), ); }); })?; diff --git a/crates/proto/proto/buffer.proto b/crates/proto/proto/buffer.proto index 01f4bda9e9f450ed65d4f6cb8dc9abc7c35451dd..69bd844ee743ef9038beb25b98b9b31ffb130b2c 100644 --- a/crates/proto/proto/buffer.proto +++ b/crates/proto/proto/buffer.proto @@ -212,10 +212,15 @@ message Selection { } message EditorAnchor { - uint64 excerpt_id = 1; + optional uint64 excerpt_id = 1; Anchor anchor = 2; } +message PathKey { + optional uint64 sort_prefix = 1; + string path = 2; +} + enum CursorShape { CursorBar = 0; CursorBlock = 1; diff --git a/crates/proto/proto/call.proto b/crates/proto/proto/call.proto index 4d2bf62eade7aaf633ea899cd106e8d9cb3be25d..71351fb74c5834fe0b1650f22e851c21cd752466 100644 --- a/crates/proto/proto/call.proto +++ b/crates/proto/proto/call.proto @@ -174,6 +174,7 @@ message ShareProject { reserved 3; 
bool is_ssh_project = 4; optional bool windows_paths = 5; + repeated string features = 6; } message ShareProjectResponse { @@ -193,6 +194,7 @@ message JoinProject { uint64 project_id = 1; optional string committer_email = 2; optional string committer_name = 3; + repeated string features = 4; } message JoinProjectResponse { @@ -204,6 +206,7 @@ message JoinProjectResponse { repeated string language_server_capabilities = 8; ChannelRole role = 6; bool windows_paths = 9; + repeated string features = 10; reserved 7; } @@ -222,6 +225,7 @@ message UpdateWorktree { uint64 scan_id = 8; bool is_last_update = 9; string abs_path = 10; + optional string root_repo_common_dir = 11; } // deprecated @@ -359,6 +363,8 @@ message UpdateView { reserved 7; double scroll_x = 8; double scroll_y = 9; + repeated PathExcerpts updated_paths = 10; + repeated uint64 deleted_buffers = 11; } } @@ -385,6 +391,7 @@ message View { reserved 8; double scroll_x = 9; double scroll_y = 10; + repeated PathExcerpts path_excerpts = 11; } message ChannelView { @@ -407,6 +414,19 @@ message Excerpt { Anchor primary_end = 6; } +message ExcerptRange { + Anchor context_start = 1; + Anchor context_end = 2; + Anchor primary_start = 3; + Anchor primary_end = 4; +} + +message PathExcerpts { + PathKey path_key = 1; + uint64 buffer_id = 2; + repeated ExcerptRange ranges = 3; +} + message Contact { uint64 user_id = 1; bool online = 2; diff --git a/crates/proto/proto/git.proto b/crates/proto/proto/git.proto index cb878cade726002e7e09670cf7c190880d8e66cb..9324feb21b1f50ac1041ed0afc8b59cb9b7fe2c6 100644 --- a/crates/proto/proto/git.proto +++ b/crates/proto/proto/git.proto @@ -403,6 +403,7 @@ message Commit { message CommitOptions { bool amend = 1; bool signoff = 2; + bool allow_empty = 3; } } @@ -567,6 +568,15 @@ message GitGetWorktrees { uint64 repository_id = 2; } +message GitGetHeadSha { + uint64 project_id = 1; + uint64 repository_id = 2; +} + +message GitGetHeadShaResponse { + optional string sha = 1; +} + message 
GitWorktreesResponse { repeated Worktree worktrees = 1; } @@ -586,6 +596,43 @@ message GitCreateWorktree { optional string commit = 5; } +message GitCreateCheckpoint { + uint64 project_id = 1; + uint64 repository_id = 2; +} + +message GitCreateCheckpointResponse { + bytes commit_sha = 1; +} + +message GitRestoreCheckpoint { + uint64 project_id = 1; + uint64 repository_id = 2; + bytes commit_sha = 3; +} + +message GitCompareCheckpoints { + uint64 project_id = 1; + uint64 repository_id = 2; + bytes left_commit_sha = 3; + bytes right_commit_sha = 4; +} + +message GitCompareCheckpointsResponse { + bool equal = 1; +} + +message GitDiffCheckpoints { + uint64 project_id = 1; + uint64 repository_id = 2; + bytes base_commit_sha = 3; + bytes target_commit_sha = 4; +} + +message GitDiffCheckpointsResponse { + string diff = 1; +} + message GitRemoveWorktree { uint64 project_id = 1; uint64 repository_id = 2; diff --git a/crates/proto/proto/zed.proto b/crates/proto/proto/zed.proto index d165bcb9529a41294d2bc25572f454c425f8c3f0..8b62754d7af40b7c4f5e1a87ad42899d682ba453 100644 --- a/crates/proto/proto/zed.proto +++ b/crates/proto/proto/zed.proto @@ -467,7 +467,16 @@ message Envelope { SpawnKernelResponse spawn_kernel_response = 427; KillKernel kill_kernel = 428; GitRemoveWorktree git_remove_worktree = 431; - GitRenameWorktree git_rename_worktree = 432; // current max + GitRenameWorktree git_rename_worktree = 432; + GitCreateCheckpoint git_create_checkpoint = 433; + GitCreateCheckpointResponse git_create_checkpoint_response = 434; + GitRestoreCheckpoint git_restore_checkpoint = 435; + GitCompareCheckpoints git_compare_checkpoints = 436; + GitCompareCheckpointsResponse git_compare_checkpoints_response = 437; + GitDiffCheckpoints git_diff_checkpoints = 438; + GitDiffCheckpointsResponse git_diff_checkpoints_response = 439; + GitGetHeadSha git_get_head_sha = 440; + GitGetHeadShaResponse git_get_head_sha_response = 441; // current max } reserved 87 to 88; diff --git 
a/crates/proto/src/proto.rs b/crates/proto/src/proto.rs index 8c72fa08c57755dc45b9658db441a037d0a9fe2e..b77bd02313c13a9b04eb7762a97f9e77ac8cbaf8 100644 --- a/crates/proto/src/proto.rs +++ b/crates/proto/src/proto.rs @@ -294,6 +294,13 @@ messages!( (GitCommitDetails, Background), (GitFileHistory, Background), (GitFileHistoryResponse, Background), + (GitCreateCheckpoint, Background), + (GitCreateCheckpointResponse, Background), + (GitRestoreCheckpoint, Background), + (GitCompareCheckpoints, Background), + (GitCompareCheckpointsResponse, Background), + (GitDiffCheckpoints, Background), + (GitDiffCheckpointsResponse, Background), (SetIndexText, Background), (Push, Background), (Fetch, Background), @@ -344,6 +351,8 @@ messages!( (NewExternalAgentVersionAvailable, Background), (RemoteStarted, Background), (GitGetWorktrees, Background), + (GitGetHeadSha, Background), + (GitGetHeadShaResponse, Background), (GitWorktreesResponse, Background), (GitCreateWorktree, Background), (GitRemoveWorktree, Background), @@ -514,6 +523,10 @@ request_messages!( (RegisterBufferWithLanguageServers, Ack), (GitShow, GitCommitDetails), (GitFileHistory, GitFileHistoryResponse), + (GitCreateCheckpoint, GitCreateCheckpointResponse), + (GitRestoreCheckpoint, Ack), + (GitCompareCheckpoints, GitCompareCheckpointsResponse), + (GitDiffCheckpoints, GitDiffCheckpointsResponse), (GitReset, Ack), (GitDeleteBranch, Ack), (GitCheckoutFiles, Ack), @@ -547,6 +560,7 @@ request_messages!( (GetContextServerCommand, ContextServerCommand), (RemoteStarted, Ack), (GitGetWorktrees, GitWorktreesResponse), + (GitGetHeadSha, GitGetHeadShaResponse), (GitCreateWorktree, Ack), (GitRemoveWorktree, Ack), (GitRenameWorktree, Ack), @@ -696,6 +710,10 @@ entity_messages!( RegisterBufferWithLanguageServers, GitShow, GitFileHistory, + GitCreateCheckpoint, + GitRestoreCheckpoint, + GitCompareCheckpoints, + GitDiffCheckpoints, GitReset, GitDeleteBranch, GitCheckoutFiles, @@ -734,6 +752,7 @@ entity_messages!( 
ExternalAgentLoadingStatusUpdated, NewExternalAgentVersionAvailable, GitGetWorktrees, + GitGetHeadSha, GitCreateWorktree, GitRemoveWorktree, GitRenameWorktree, @@ -866,6 +885,7 @@ pub fn split_worktree_update(mut message: UpdateWorktree) -> impl Iterator String { } pub fn suggest_on_worktree_updated( + workspace: &mut Workspace, worktree_id: WorktreeId, updated_entries: &UpdatedEntriesSet, project: &gpui::Entity, window: &mut Window, cx: &mut Context, ) { + let cli_auto_open = workspace.open_in_dev_container(); + let devcontainer_updated = updated_entries.iter().any(|(path, _, _)| { path.as_ref() == devcontainer_dir_path() || path.as_ref() == devcontainer_json_path() }); - if !devcontainer_updated { + if !devcontainer_updated && !cli_auto_open { return; } @@ -54,7 +57,35 @@ pub fn suggest_on_worktree_updated( return; } - if find_configs_in_snapshot(worktree).is_empty() { + let has_configs = !find_configs_in_snapshot(worktree).is_empty(); + + if cli_auto_open { + workspace.set_open_in_dev_container(false); + let task = cx.spawn_in(window, async move |workspace, cx| { + let scans_complete = + workspace.update(cx, |workspace, cx| workspace.worktree_scans_complete(cx))?; + scans_complete.await; + + workspace.update_in(cx, |workspace, window, cx| { + let has_configs = workspace + .project() + .read(cx) + .worktrees(cx) + .any(|wt| !find_configs_in_snapshot(wt.read(cx)).is_empty()); + if has_configs { + cx.on_next_frame(window, move |_workspace, window, cx| { + window.dispatch_action(Box::new(zed_actions::OpenDevContainer), cx); + }); + } else { + log::warn!("--dev-container: no devcontainer configuration found in project"); + } + }) + }); + workspace.set_dev_container_task(task); + return; + } + + if !has_configs { return; } diff --git a/crates/recent_projects/src/recent_projects.rs b/crates/recent_projects/src/recent_projects.rs index 7194e8868fd2a0015edd5c18c96f2fe164206fb7..e3bfc0dc08c95c0ce57b818e50965433a6c6bc98 100644 --- 
a/crates/recent_projects/src/recent_projects.rs +++ b/crates/recent_projects/src/recent_projects.rs @@ -52,7 +52,10 @@ use workspace::{ }; use zed_actions::{OpenDevContainer, OpenRecent, OpenRemote}; -actions!(recent_projects, [ToggleActionsMenu]); +actions!( + recent_projects, + [ToggleActionsMenu, RemoveSelected, AddToWorkspace,] +); #[derive(Clone, Debug)] pub struct RecentProjectEntry { @@ -354,7 +357,6 @@ pub fn init(cx: &mut App) { .update(cx, |multi_workspace, window, cx| { let sibling_workspace_ids: HashSet = multi_workspace .workspaces() - .iter() .filter_map(|ws| ws.read(cx).database_id()) .collect(); @@ -472,11 +474,12 @@ pub fn init(cx: &mut App) { cx.subscribe_in( workspace.project(), window, - move |_, project, event, window, cx| { + move |workspace, project, event, window, cx| { if let project::Event::WorktreeUpdatedEntries(worktree_id, updated_entries) = event { dev_container_suggest::suggest_on_worktree_updated( + workspace, *worktree_id, updated_entries, project, @@ -684,6 +687,79 @@ impl RecentProjects { } }); } + + fn handle_remove_selected( + &mut self, + _: &RemoveSelected, + window: &mut Window, + cx: &mut Context, + ) { + self.picker.update(cx, |picker, cx| { + let ix = picker.delegate.selected_index; + + match picker.delegate.filtered_entries.get(ix) { + Some(ProjectPickerEntry::OpenFolder { index, .. 
}) => { + if let Some(folder) = picker.delegate.open_folders.get(*index) { + let worktree_id = folder.worktree_id; + let Some(workspace) = picker.delegate.workspace.upgrade() else { + return; + }; + workspace.update(cx, |workspace, cx| { + let project = workspace.project().clone(); + project.update(cx, |project, cx| { + project.remove_worktree(worktree_id, cx); + }); + }); + picker.delegate.open_folders = get_open_folders(workspace.read(cx), cx); + let query = picker.query(cx); + picker.update_matches(query, window, cx); + } + } + Some(ProjectPickerEntry::OpenProject(hit)) => { + if let Some((workspace_id, ..)) = + picker.delegate.workspaces.get(hit.candidate_id) + { + let workspace_id = *workspace_id; + picker + .delegate + .remove_sibling_workspace(workspace_id, window, cx); + let query = picker.query(cx); + picker.update_matches(query, window, cx); + } + } + Some(ProjectPickerEntry::RecentProject(_)) => { + picker.delegate.delete_recent_project(ix, window, cx); + } + _ => {} + } + }); + } + + fn handle_add_to_workspace( + &mut self, + _: &AddToWorkspace, + window: &mut Window, + cx: &mut Context, + ) { + self.picker.update(cx, |picker, cx| { + let ix = picker.delegate.selected_index; + + if let Some(ProjectPickerEntry::RecentProject(hit)) = + picker.delegate.filtered_entries.get(ix) + { + if let Some((_, location, paths, _)) = + picker.delegate.workspaces.get(hit.candidate_id) + { + if matches!(location, SerializedWorkspaceLocation::Local) { + let paths_to_add = paths.paths().to_vec(); + picker + .delegate + .add_project_to_workspace(paths_to_add, window, cx); + } + } + } + }); + } } impl EventEmitter for RecentProjects {} @@ -699,6 +775,8 @@ impl Render for RecentProjects { v_flex() .key_context("RecentProjects") .on_action(cx.listener(Self::handle_toggle_open_menu)) + .on_action(cx.listener(Self::handle_remove_selected)) + .on_action(cx.listener(Self::handle_add_to_workspace)) .w(rems(self.rem_width)) .child(self.picker.clone()) } @@ -1034,7 +1112,6 @@ impl 
PickerDelegate for RecentProjectsDelegate { .update(cx, |multi_workspace, window, cx| { let workspace = multi_workspace .workspaces() - .iter() .find(|ws| ws.read(cx).database_id() == Some(workspace_id)) .cloned(); if let Some(workspace) = workspace { @@ -1081,7 +1158,7 @@ impl PickerDelegate for RecentProjectsDelegate { .update(cx, |multi_workspace, window, cx| { multi_workspace.open_project( paths, - OpenMode::Replace, + OpenMode::Activate, window, cx, ) @@ -1364,7 +1441,6 @@ impl PickerDelegate for RecentProjectsDelegate { ) } ProjectPickerEntry::RecentProject(hit) => { - let popover_style = matches!(self.style, ProjectPickerStyle::Popover); let (_, location, paths, _) = self.workspaces.get(hit.candidate_id)?; let is_local = matches!(location, SerializedWorkspaceLocation::Local); let paths_to_add = paths.paths().to_vec(); @@ -1432,28 +1508,26 @@ impl PickerDelegate for RecentProjectsDelegate { }), ) }) - .when(popover_style, |this| { - this.child( - IconButton::new("open_new_window", IconName::ArrowUpRight) - .icon_size(IconSize::XSmall) - .tooltip({ - move |_, cx| { - Tooltip::for_action_in( - "Open Project in New Window", - &menu::SecondaryConfirm, - &focus_handle, - cx, - ) - } - }) - .on_click(cx.listener(move |this, _event, window, cx| { - cx.stop_propagation(); - window.prevent_default(); - this.delegate.set_selected_index(ix, window, cx); - this.delegate.confirm(true, window, cx); - })), - ) - }) + .child( + IconButton::new("open_new_window", IconName::ArrowUpRight) + .icon_size(IconSize::XSmall) + .tooltip({ + move |_, cx| { + Tooltip::for_action_in( + "Open Project in New Window", + &menu::SecondaryConfirm, + &focus_handle, + cx, + ) + } + }) + .on_click(cx.listener(move |this, _event, window, cx| { + cx.stop_propagation(); + window.prevent_default(); + this.delegate.set_selected_index(ix, window, cx); + this.delegate.confirm(true, window, cx); + })), + ) .child( IconButton::new("delete", IconName::Close) .icon_size(IconSize::Small) @@ -1518,9 +1592,7 
@@ impl PickerDelegate for RecentProjectsDelegate { .border_t_1() .border_color(cx.theme().colors().border_variant) .child({ - let open_action = workspace::Open { - create_new_window: self.create_new_window, - }; + let open_action = workspace::Open::default(); Button::new("open_local_folder", "Open Local Project") .key_binding(KeyBinding::for_action_in(&open_action, &focus_handle, cx)) .on_click(move |_, window, cx| { @@ -1551,6 +1623,44 @@ impl PickerDelegate for RecentProjectsDelegate { ); } + let selected_entry = self.filtered_entries.get(self.selected_index); + + let secondary_footer_actions: Option = match selected_entry { + Some(ProjectPickerEntry::OpenFolder { .. } | ProjectPickerEntry::OpenProject(_)) => { + let label = if matches!(selected_entry, Some(ProjectPickerEntry::OpenFolder { .. })) + { + "Remove Folder" + } else { + "Remove from Window" + }; + Some( + Button::new("remove_selected", label) + .key_binding(KeyBinding::for_action_in( + &RemoveSelected, + &focus_handle, + cx, + )) + .on_click(|_, window, cx| { + window.dispatch_action(RemoveSelected.boxed_clone(), cx) + }) + .into_any_element(), + ) + } + Some(ProjectPickerEntry::RecentProject(_)) => Some( + Button::new("delete_recent", "Delete") + .key_binding(KeyBinding::for_action_in( + &RemoveSelected, + &focus_handle, + cx, + )) + .on_click(|_, window, cx| { + window.dispatch_action(RemoveSelected.boxed_clone(), cx) + }) + .into_any_element(), + ), + _ => None, + }; + Some( h_flex() .flex_1() @@ -1559,6 +1669,9 @@ impl PickerDelegate for RecentProjectsDelegate { .justify_end() .border_t_1() .border_color(cx.theme().colors().border_variant) + .when_some(secondary_footer_actions, |this, actions| { + this.child(actions) + }) .map(|this| { if is_already_open_entry { this.child( @@ -1607,7 +1720,7 @@ impl PickerDelegate for RecentProjectsDelegate { y: px(-2.0), }) .trigger( - Button::new("actions-trigger", "Actions…") + Button::new("actions-trigger", "Actions") 
.selected_style(ButtonStyle::Tinted(TintColor::Accent)) .key_binding(KeyBinding::for_action_in( &ToggleActionsMenu, @@ -1617,16 +1730,32 @@ impl PickerDelegate for RecentProjectsDelegate { ) .menu({ let focus_handle = focus_handle.clone(); - let create_new_window = self.create_new_window; + let show_add_to_workspace = match selected_entry { + Some(ProjectPickerEntry::RecentProject(hit)) => self + .workspaces + .get(hit.candidate_id) + .map(|(_, loc, ..)| { + matches!(loc, SerializedWorkspaceLocation::Local) + }) + .unwrap_or(false), + _ => false, + }; move |window, cx| { Some(ContextMenu::build(window, cx, { let focus_handle = focus_handle.clone(); move |menu, _, _| { menu.context(focus_handle) + .when(show_add_to_workspace, |menu| { + menu.action( + "Add to Workspace", + AddToWorkspace.boxed_clone(), + ) + .separator() + }) .action( "Open Local Project", - workspace::Open { create_new_window }.boxed_clone(), + workspace::Open::default().boxed_clone(), ) .action( "Open Remote Project", @@ -1801,7 +1930,6 @@ impl RecentProjectsDelegate { .update(cx, |multi_workspace, window, cx| { let workspace = multi_workspace .workspaces() - .iter() .find(|ws| ws.read(cx).database_id() == Some(workspace_id)) .cloned(); if let Some(workspace) = workspace { @@ -1872,7 +2000,7 @@ mod tests { use std::path::PathBuf; use editor::Editor; - use gpui::{TestAppContext, UpdateGlobal, WindowHandle}; + use gpui::{TestAppContext, UpdateGlobal, VisualTestContext, WindowHandle}; use serde_json::json; use settings::SettingsStore; @@ -1924,6 +2052,11 @@ mod tests { assert_eq!(cx.update(|cx| cx.windows().len()), 1); let multi_workspace = cx.update(|cx| cx.windows()[0].downcast::().unwrap()); + multi_workspace + .update(cx, |multi_workspace, _, cx| { + multi_workspace.open_sidebar(cx); + }) + .unwrap(); multi_workspace .update(cx, |multi_workspace, _, cx| { assert!(!multi_workspace.workspace().read(cx).is_edited()) @@ -1991,14 +2124,12 @@ mod tests { cx.dispatch_action(*multi_workspace, 
menu::Confirm); cx.run_until_parked(); - // prepare_to_close triggers a save prompt for the dirty buffer. - // Choose "Don't Save" (index 2) to discard and continue replacing. + // In multi-workspace mode, the dirty workspace is kept and a new one is + // opened alongside it — no save prompt needed. assert!( - cx.has_pending_prompt(), - "Should prompt to save dirty buffer before replacing workspace" + !cx.has_pending_prompt(), + "Should not prompt in multi-workspace mode — dirty workspace is kept" ); - cx.simulate_prompt_answer("Don't Save"); - cx.run_until_parked(); multi_workspace .update(cx, |multi_workspace, _, cx| { @@ -2012,8 +2143,8 @@ mod tests { ); assert!( - !multi_workspace.workspaces().contains(&dirty_workspace), - "The original dirty workspace should have been replaced" + multi_workspace.workspaces().any(|w| w == &dirty_workspace), + "The dirty workspace should still be present in multi-workspace mode" ); assert!( @@ -2113,6 +2244,71 @@ mod tests { .unwrap(); } + #[gpui::test] + async fn test_dev_container_modal_not_dismissed_on_backdrop_click(cx: &mut TestAppContext) { + let app_state = init_test(cx); + + app_state + .fs + .as_fake() + .insert_tree( + path!("/project"), + json!({ + ".devcontainer": { + "devcontainer.json": "{}" + }, + "src": { + "main.rs": "fn main() {}" + } + }), + ) + .await; + + cx.update(|cx| { + open_paths( + &[PathBuf::from(path!("/project"))], + app_state, + workspace::OpenOptions::default(), + cx, + ) + }) + .await + .unwrap(); + + assert_eq!(cx.update(|cx| cx.windows().len()), 1); + let multi_workspace = cx.update(|cx| cx.windows()[0].downcast::().unwrap()); + + cx.run_until_parked(); + + cx.dispatch_action(*multi_workspace, OpenDevContainer); + + multi_workspace + .update(cx, |multi_workspace, _, cx| { + assert!( + multi_workspace + .active_modal::(cx) + .is_some(), + "Dev container modal should be open" + ); + }) + .unwrap(); + + // Click outside the modal (on the backdrop) to try to dismiss it + let mut vcx = 
VisualTestContext::from_window(*multi_workspace, cx); + vcx.simulate_click(gpui::point(px(1.0), px(1.0)), gpui::Modifiers::default()); + + multi_workspace + .update(cx, |multi_workspace, _, cx| { + assert!( + multi_workspace + .active_modal::(cx) + .is_some(), + "Dev container modal should remain open during creation" + ); + }) + .unwrap(); + } + #[gpui::test] async fn test_open_dev_container_action_with_multiple_configs(cx: &mut TestAppContext) { let app_state = init_test(cx); diff --git a/crates/recent_projects/src/remote_servers.rs b/crates/recent_projects/src/remote_servers.rs index 404b0673ab8cf220385d1a0ce41a40156d469a01..7db09c88616879010352cbc2ac0fd0549982240b 100644 --- a/crates/recent_projects/src/remote_servers.rs +++ b/crates/recent_projects/src/remote_servers.rs @@ -54,7 +54,7 @@ use util::{ rel_path::RelPath, }; use workspace::{ - AppState, ModalView, MultiWorkspace, OpenLog, OpenOptions, Toast, Workspace, + AppState, DismissDecision, ModalView, MultiWorkspace, OpenLog, OpenOptions, Toast, Workspace, notifications::{DetachAndPromptErr, NotificationId}, open_remote_project_with_existing_connection, }; @@ -69,6 +69,7 @@ pub struct RemoteServerProjects { create_new_window: bool, dev_container_picker: Option>>, _subscription: Subscription, + allow_dismissal: bool, } struct CreateRemoteServer { @@ -920,6 +921,7 @@ impl RemoteServerProjects { create_new_window, dev_container_picker: None, _subscription, + allow_dismissal: true, } } @@ -1140,6 +1142,7 @@ impl RemoteServerProjects { } fn view_in_progress_dev_container(&mut self, window: &mut Window, cx: &mut Context) { + self.allow_dismissal = false; self.mode = Mode::CreateRemoteDevContainer(CreateRemoteDevContainer::new( DevContainerCreationProgress::Creating, cx, @@ -1309,6 +1312,7 @@ impl RemoteServerProjects { cx.emit(DismissEvent); } _ => { + self.allow_dismissal = true; self.mode = Mode::default_mode(&self.ssh_config_servers, cx); self.focus_handle(cx).focus(window, cx); cx.notify(); @@ -1875,6 +1879,7 
@@ impl RemoteServerProjects { .ok(); entity .update_in(cx, |remote_server_projects, window, cx| { + remote_server_projects.allow_dismissal = true; remote_server_projects.mode = Mode::CreateRemoteDevContainer(CreateRemoteDevContainer::new( DevContainerCreationProgress::Error(format!("{e}")), @@ -1897,7 +1902,8 @@ impl RemoteServerProjects { .log_err(); entity - .update(cx, |_, cx| { + .update(cx, |this, cx| { + this.allow_dismissal = true; cx.emit(DismissEvent); }) .log_err(); @@ -2948,7 +2954,15 @@ fn get_text(element: &Entity, cx: &mut App) -> String { element.read(cx).text(cx).trim().to_string() } -impl ModalView for RemoteServerProjects {} +impl ModalView for RemoteServerProjects { + fn on_before_dismiss( + &mut self, + _window: &mut Window, + _cx: &mut Context, + ) -> DismissDecision { + DismissDecision::Dismiss(self.allow_dismissal) + } +} impl Focusable for RemoteServerProjects { fn focus_handle(&self, cx: &App) -> FocusHandle { diff --git a/crates/recent_projects/src/sidebar_recent_projects.rs b/crates/recent_projects/src/sidebar_recent_projects.rs index 4741c23049b34263c9b65d6c751675543d01c3df..1fe0d2ae86aefdad45136c496f8049689d77e048 100644 --- a/crates/recent_projects/src/sidebar_recent_projects.rs +++ b/crates/recent_projects/src/sidebar_recent_projects.rs @@ -21,6 +21,8 @@ use workspace::{ WorkspaceDb, WorkspaceId, notifications::DetachAndPromptErr, }; +use zed_actions::OpenRemote; + use crate::{highlights_for_path, icon_for_remote_connection, open_remote_project}; pub struct SidebarRecentProjects { @@ -412,13 +414,35 @@ impl PickerDelegate for SidebarRecentProjectsDelegate { let open_action = workspace::Open { create_new_window: false, }; + Button::new("open_local_folder", "Add Local Project") .key_binding(KeyBinding::for_action_in(&open_action, &focus_handle, cx)) .on_click(cx.listener(move |_, _, window, cx| { + window.dispatch_action(open_action.boxed_clone(), cx); cx.emit(DismissEvent); - window.dispatch_action(open_action.boxed_clone(), cx) })) 
}) + .child( + Button::new("open_remote_folder", "Add Remote Project") + .key_binding(KeyBinding::for_action( + &OpenRemote { + from_existing_connection: false, + create_new_window: false, + }, + cx, + )) + .on_click(cx.listener(|_, _, window, cx| { + window.dispatch_action( + OpenRemote { + from_existing_connection: false, + create_new_window: false, + } + .boxed_clone(), + cx, + ); + cx.emit(DismissEvent); + })), + ) .into_any(), ) } diff --git a/crates/recent_projects/src/wsl_picker.rs b/crates/recent_projects/src/wsl_picker.rs index 9c08c4f5f4941a80afdd2d9cbb6f2c51ee8ec754..c53dd7c3fb68bc087216764536506f85117ffb36 100644 --- a/crates/recent_projects/src/wsl_picker.rs +++ b/crates/recent_projects/src/wsl_picker.rs @@ -246,7 +246,7 @@ impl WslOpenModal { false => !secondary, }; let open_mode = if replace_current_window { - workspace::OpenMode::Replace + workspace::OpenMode::Activate } else { workspace::OpenMode::NewWindow }; diff --git a/crates/remote/src/remote_client.rs b/crates/remote/src/remote_client.rs index f31fc9ebec028b6a42a7cbc0d61cf9574a4a0f3c..c04d3630f92bcc27afb01a619176d3ae79d3fac7 100644 --- a/crates/remote/src/remote_client.rs +++ b/crates/remote/src/remote_client.rs @@ -1273,7 +1273,7 @@ impl ConnectionPool { } } -#[derive(Debug, Clone, PartialEq, Eq, Hash)] +#[derive(Debug, Clone, PartialEq, Eq, Hash, serde::Serialize, serde::Deserialize)] pub enum RemoteConnectionOptions { Ssh(SshConnectionOptions), Wsl(WslConnectionOptions), @@ -1285,7 +1285,10 @@ pub enum RemoteConnectionOptions { impl RemoteConnectionOptions { pub fn display_name(&self) -> String { match self { - RemoteConnectionOptions::Ssh(opts) => opts.host.to_string(), + RemoteConnectionOptions::Ssh(opts) => opts + .nickname + .clone() + .unwrap_or_else(|| opts.host.to_string()), RemoteConnectionOptions::Wsl(opts) => opts.distro_name.clone(), RemoteConnectionOptions::Docker(opts) => { if opts.use_podman { @@ -1300,6 +1303,32 @@ impl RemoteConnectionOptions { } } +#[cfg(test)] +mod tests 
{ + use super::*; + + #[test] + fn test_ssh_display_name_prefers_nickname() { + let options = RemoteConnectionOptions::Ssh(SshConnectionOptions { + host: "1.2.3.4".into(), + nickname: Some("My Cool Project".to_string()), + ..Default::default() + }); + + assert_eq!(options.display_name(), "My Cool Project"); + } + + #[test] + fn test_ssh_display_name_falls_back_to_host() { + let options = RemoteConnectionOptions::Ssh(SshConnectionOptions { + host: "1.2.3.4".into(), + ..Default::default() + }); + + assert_eq!(options.display_name(), "1.2.3.4"); + } +} + impl From for RemoteConnectionOptions { fn from(opts: SshConnectionOptions) -> Self { RemoteConnectionOptions::Ssh(opts) diff --git a/crates/remote/src/transport/docker.rs b/crates/remote/src/transport/docker.rs index 2b935e50fa49054a2668a71d30818fdd2fb57b1d..6322cd9193d383cfcd3e9ff5cb93670bcd136023 100644 --- a/crates/remote/src/transport/docker.rs +++ b/crates/remote/src/transport/docker.rs @@ -30,7 +30,18 @@ use crate::{ transport::parse_platform, }; -#[derive(Debug, Default, Clone, PartialEq, Eq, Hash)] +#[derive( + Debug, + Default, + Clone, + PartialEq, + Eq, + Hash, + PartialOrd, + Ord, + serde::Serialize, + serde::Deserialize, +)] pub struct DockerConnectionOptions { pub name: String, pub container_id: String, diff --git a/crates/remote/src/transport/mock.rs b/crates/remote/src/transport/mock.rs index 06e13196583fef9743e3f337bfe9cd9acf0efbca..f567d24eb122f72b4dbb79cdeb2c98c744f02da4 100644 --- a/crates/remote/src/transport/mock.rs +++ b/crates/remote/src/transport/mock.rs @@ -56,7 +56,7 @@ use std::{ use util::paths::{PathStyle, RemotePathBuf}; /// Unique identifier for a mock connection. 
-#[derive(Debug, Clone, PartialEq, Eq, Hash)] +#[derive(Debug, Clone, PartialEq, Eq, Hash, serde::Serialize, serde::Deserialize)] pub struct MockConnectionOptions { pub id: u64, } diff --git a/crates/remote/src/transport/ssh.rs b/crates/remote/src/transport/ssh.rs index 42cfc8f86dc34712e6b2cd0e4b5d8f379e443834..1884ea43b6492efba91623eb1ab4c5a1ed4d3de1 100644 --- a/crates/remote/src/transport/ssh.rs +++ b/crates/remote/src/transport/ssh.rs @@ -45,7 +45,7 @@ pub(crate) struct SshRemoteConnection { _temp_dir: TempDir, } -#[derive(Debug, Clone, PartialEq, Eq, Hash)] +#[derive(Debug, Clone, PartialEq, Eq, Hash, serde::Serialize, serde::Deserialize)] pub enum SshConnectionHost { IpAddr(IpAddr), Hostname(String), @@ -102,7 +102,7 @@ fn bracket_ipv6(host: &str) -> String { } } -#[derive(Debug, Default, Clone, PartialEq, Eq, Hash)] +#[derive(Debug, Default, Clone, PartialEq, Eq, Hash, serde::Serialize, serde::Deserialize)] pub struct SshConnectionOptions { pub host: SshConnectionHost, pub username: Option, diff --git a/crates/remote/src/transport/wsl.rs b/crates/remote/src/transport/wsl.rs index 5a37e1c65bfe11221b60499779c57f0ce7dca364..1bbbaca2235c0bcf14c414a9419ab9dd92b4e814 100644 --- a/crates/remote/src/transport/wsl.rs +++ b/crates/remote/src/transport/wsl.rs @@ -28,7 +28,9 @@ use util::{ shell_builder::ShellBuilder, }; -#[derive(Debug, Clone, PartialEq, Eq, Hash, serde::Deserialize, schemars::JsonSchema)] +#[derive( + Debug, Clone, PartialEq, Eq, Hash, serde::Serialize, serde::Deserialize, schemars::JsonSchema, +)] pub struct WslConnectionOptions { pub distro_name: String, pub user: Option, diff --git a/crates/remote_server/src/headless_project.rs b/crates/remote_server/src/headless_project.rs index c725f8177648ea0ca16106251e65908255a38d6d..7bdbbad796bd2ced34ed7ccab690555457a0842b 100644 --- a/crates/remote_server/src/headless_project.rs +++ b/crates/remote_server/src/headless_project.rs @@ -191,6 +191,7 @@ impl HeadlessProject { worktree_store.clone(), 
toolchain_store.read(cx).as_language_toolchain_store(), environment.clone(), + git_store.clone(), cx, ); task_store.shared(REMOTE_SERVER_PROJECT_ID, session.clone(), cx); diff --git a/crates/remote_server/src/remote_editing_tests.rs b/crates/remote_server/src/remote_editing_tests.rs index 86b7f93eb2c737cac55dbf2882f91ec277e4e174..f0f23577d31075ab815d6dba1cdbdccd275c184a 100644 --- a/crates/remote_server/src/remote_editing_tests.rs +++ b/crates/remote_server/src/remote_editing_tests.rs @@ -11,6 +11,7 @@ use languages::rust_lang; use extension::ExtensionHostProxy; use fs::{FakeFs, Fs}; +use git::repository::Worktree as GitWorktree; use gpui::{AppContext as _, Entity, SharedString, TestAppContext}; use http_client::{BlockedHttpClient, FakeHttpClient}; use language::{ @@ -1539,6 +1540,87 @@ async fn test_copy_file_into_remote_project( ); } +#[gpui::test] +async fn test_remote_root_repo_common_dir(cx: &mut TestAppContext, server_cx: &mut TestAppContext) { + let fs = FakeFs::new(server_cx.executor()); + fs.insert_tree( + "/code", + json!({ + "main_repo": { + ".git": {}, + "file.txt": "content", + }, + "no_git": { + "file.txt": "content", + }, + }), + ) + .await; + + // Create a linked worktree that points back to main_repo's .git. + fs.add_linked_worktree_for_repo( + Path::new("/code/main_repo/.git"), + false, + GitWorktree { + path: PathBuf::from("/code/linked_worktree"), + ref_name: Some("refs/heads/feature-branch".into()), + sha: "abc123".into(), + is_main: false, + }, + ) + .await; + + let (project, _headless) = init_test(&fs, cx, server_cx).await; + + // Main repo: root_repo_common_dir should be the .git directory itself. 
+ let (worktree_main, _) = project + .update(cx, |project, cx| { + project.find_or_create_worktree("/code/main_repo", true, cx) + }) + .await + .unwrap(); + cx.executor().run_until_parked(); + + let common_dir = worktree_main.read_with(cx, |worktree, _| { + worktree.snapshot().root_repo_common_dir().cloned() + }); + assert_eq!( + common_dir.as_deref(), + Some(Path::new("/code/main_repo/.git")), + ); + + // Linked worktree: root_repo_common_dir should point to the main repo's .git. + let (worktree_linked, _) = project + .update(cx, |project, cx| { + project.find_or_create_worktree("/code/linked_worktree", true, cx) + }) + .await + .unwrap(); + cx.executor().run_until_parked(); + + let common_dir = worktree_linked.read_with(cx, |worktree, _| { + worktree.snapshot().root_repo_common_dir().cloned() + }); + assert_eq!( + common_dir.as_deref(), + Some(Path::new("/code/main_repo/.git")), + ); + + // No git repo: root_repo_common_dir should be None. + let (worktree_no_git, _) = project + .update(cx, |project, cx| { + project.find_or_create_worktree("/code/no_git", true, cx) + }) + .await + .unwrap(); + cx.executor().run_until_parked(); + + let common_dir = worktree_no_git.read_with(cx, |worktree, _| { + worktree.snapshot().root_repo_common_dir().cloned() + }); + assert_eq!(common_dir, None); +} + #[gpui::test] async fn test_remote_git_diffs(cx: &mut TestAppContext, server_cx: &mut TestAppContext) { let text_2 = " @@ -1917,6 +1999,153 @@ async fn test_remote_git_branches(cx: &mut TestAppContext, server_cx: &mut TestA assert_eq!(server_branch.name(), "totally-new-branch"); } +#[gpui::test] +async fn test_remote_git_checkpoints(cx: &mut TestAppContext, server_cx: &mut TestAppContext) { + let fs = FakeFs::new(server_cx.executor()); + fs.insert_tree( + path!("/code"), + json!({ + "project1": { + ".git": {}, + "file.txt": "original content", + }, + }), + ) + .await; + + let (project, _headless) = init_test(&fs, cx, server_cx).await; + + let (_worktree, _) = project + .update(cx, 
|project, cx| { + project.find_or_create_worktree(path!("/code/project1"), true, cx) + }) + .await + .unwrap(); + cx.run_until_parked(); + + let repository = project.update(cx, |project, cx| project.active_repository(cx).unwrap()); + + // 1. Create a checkpoint of the original state + let checkpoint_1 = repository + .update(cx, |repository, _| repository.checkpoint()) + .await + .unwrap() + .unwrap(); + + // 2. Modify a file on the server-side fs + fs.write( + Path::new(path!("/code/project1/file.txt")), + b"modified content", + ) + .await + .unwrap(); + + // 3. Create a second checkpoint with the modified state + let checkpoint_2 = repository + .update(cx, |repository, _| repository.checkpoint()) + .await + .unwrap() + .unwrap(); + + // 4. compare_checkpoints: same checkpoint with itself => equal + let equal = repository + .update(cx, |repository, _| { + repository.compare_checkpoints(checkpoint_1.clone(), checkpoint_1.clone()) + }) + .await + .unwrap() + .unwrap(); + assert!(equal, "a checkpoint compared with itself should be equal"); + + // 5. compare_checkpoints: different states => not equal + let equal = repository + .update(cx, |repository, _| { + repository.compare_checkpoints(checkpoint_1.clone(), checkpoint_2.clone()) + }) + .await + .unwrap() + .unwrap(); + assert!( + !equal, + "checkpoints of different states should not be equal" + ); + + // 6. diff_checkpoints: same checkpoint => empty diff + let diff = repository + .update(cx, |repository, _| { + repository.diff_checkpoints(checkpoint_1.clone(), checkpoint_1.clone()) + }) + .await + .unwrap() + .unwrap(); + assert!( + diff.is_empty(), + "diff of identical checkpoints should be empty" + ); + + // 7. 
diff_checkpoints: different checkpoints => non-empty diff mentioning the changed file + let diff = repository + .update(cx, |repository, _| { + repository.diff_checkpoints(checkpoint_1.clone(), checkpoint_2.clone()) + }) + .await + .unwrap() + .unwrap(); + assert!( + !diff.is_empty(), + "diff of different checkpoints should be non-empty" + ); + assert!( + diff.contains("file.txt"), + "diff should mention the changed file" + ); + assert!( + diff.contains("original content"), + "diff should contain removed content" + ); + assert!( + diff.contains("modified content"), + "diff should contain added content" + ); + + // 8. restore_checkpoint: restore to original state + repository + .update(cx, |repository, _| { + repository.restore_checkpoint(checkpoint_1.clone()) + }) + .await + .unwrap() + .unwrap(); + cx.run_until_parked(); + + // 9. Create a checkpoint after restore + let checkpoint_3 = repository + .update(cx, |repository, _| repository.checkpoint()) + .await + .unwrap() + .unwrap(); + + // 10. compare_checkpoints: restored state matches original + let equal = repository + .update(cx, |repository, _| { + repository.compare_checkpoints(checkpoint_1.clone(), checkpoint_3.clone()) + }) + .await + .unwrap() + .unwrap(); + assert!(equal, "restored state should match original checkpoint"); + + // 11. 
diff_checkpoints: restored state vs original => empty diff + let diff = repository + .update(cx, |repository, _| { + repository.diff_checkpoints(checkpoint_1.clone(), checkpoint_3.clone()) + }) + .await + .unwrap() + .unwrap(); + assert!(diff.is_empty(), "diff after restore should be empty"); +} + #[gpui::test] async fn test_remote_agent_fs_tool_calls(cx: &mut TestAppContext, server_cx: &mut TestAppContext) { let fs = FakeFs::new(server_cx.executor()); diff --git a/crates/repl/src/kernels/mod.rs b/crates/repl/src/kernels/mod.rs index 6745bcd9b6a08cb34b3a0fc3b8219918cb4f0dca..9f08876cd39f4b7441d8c97bd1d5344b944b09ff 100644 --- a/crates/repl/src/kernels/mod.rs +++ b/crates/repl/src/kernels/mod.rs @@ -177,6 +177,13 @@ impl PythonEnvKernelSpecification { kernelspec: self.kernelspec.clone(), } } + + pub fn is_uv(&self) -> bool { + matches!( + self.environment_kind.as_deref(), + Some("uv" | "uv (Workspace)") + ) + } } #[derive(Debug, Clone, PartialEq, Eq)] diff --git a/crates/repl/src/repl_editor.rs b/crates/repl/src/repl_editor.rs index cf1493000edb5881bff412224f7e44dbfbf88b25..61bed513a16c3b9baf885714110c3de78a7094d5 100644 --- a/crates/repl/src/repl_editor.rs +++ b/crates/repl/src/repl_editor.rs @@ -87,6 +87,7 @@ pub fn install_ipykernel_and_assign( let python_path = env_spec.path.clone(); let env_name = env_spec.name.clone(); + let is_uv = env_spec.is_uv(); let env_spec = env_spec.clone(); struct IpykernelInstall; @@ -109,11 +110,25 @@ pub fn install_ipykernel_and_assign( let window_handle = window.window_handle(); let install_task = cx.background_spawn(async move { - let output = util::command::new_command(python_path.to_string_lossy().as_ref()) - .args(&["-m", "pip", "install", "ipykernel"]) - .output() - .await - .context("failed to run pip install ipykernel")?; + let output = if is_uv { + util::command::new_command("uv") + .args(&[ + "pip", + "install", + "ipykernel", + "--python", + &python_path.to_string_lossy(), + ]) + .output() + .await + .context("failed to 
run uv pip install ipykernel")? + } else { + util::command::new_command(python_path.to_string_lossy().as_ref()) + .args(&["-m", "pip", "install", "ipykernel"]) + .output() + .await + .context("failed to run pip install ipykernel")? + }; if output.status.success() { anyhow::Ok(()) @@ -146,6 +161,11 @@ pub fn install_ipykernel_and_assign( window_handle .update(cx, |_, window, cx| { + let store = ReplStore::global(cx); + store.update(cx, |store, cx| { + store.mark_ipykernel_installed(cx, &env_spec); + }); + let updated_spec = KernelSpecification::PythonEnv(PythonEnvKernelSpecification { has_ipykernel: true, diff --git a/crates/repl/src/repl_store.rs b/crates/repl/src/repl_store.rs index cf992a542830bd86c1a9ad8b1909501417f427fd..4c5827b7c0cf881725b2937cc0aef0b7e241f0f3 100644 --- a/crates/repl/src/repl_store.rs +++ b/crates/repl/src/repl_store.rs @@ -13,8 +13,8 @@ use settings::{Settings, SettingsStore}; use util::rel_path::RelPath; use crate::kernels::{ - Kernel, list_remote_kernelspecs, local_kernel_specifications, python_env_kernel_specifications, - wsl_kernel_specifications, + Kernel, PythonEnvKernelSpecification, list_remote_kernelspecs, local_kernel_specifications, + python_env_kernel_specifications, wsl_kernel_specifications, }; use crate::{JupyterSettings, KernelSpecification, Session}; @@ -136,6 +136,23 @@ impl ReplStore { cx.notify(); } + pub fn mark_ipykernel_installed( + &mut self, + cx: &mut Context, + spec: &PythonEnvKernelSpecification, + ) { + for specs in self.kernel_specifications_for_worktree.values_mut() { + for kernel_spec in specs.iter_mut() { + if let KernelSpecification::PythonEnv(env_spec) = kernel_spec { + if env_spec == spec { + env_spec.has_ipykernel = true; + } + } + } + } + cx.notify(); + } + pub fn refresh_python_kernelspecs( &mut self, worktree_id: WorktreeId, diff --git a/crates/rules_library/src/rules_library.rs b/crates/rules_library/src/rules_library.rs index 
7e5a56f22d48c4d51f60d7d200dc8384582beb23..425f7d2aa3d9e9259fe005a0e15dee10e4e4baf1 100644 --- a/crates/rules_library/src/rules_library.rs +++ b/crates/rules_library/src/rules_library.rs @@ -225,6 +225,10 @@ impl PickerDelegate for RulePickerDelegate { } } + fn select_on_hover(&self) -> bool { + false + } + fn placeholder_text(&self, _window: &mut Window, _cx: &mut App) -> Arc { "Search…".into() } diff --git a/crates/search/src/buffer_search.rs b/crates/search/src/buffer_search.rs index cab8e20cd22e1f4155232f36416be77d4f2ca24d..46177c5642a8d05daaf22e9fb24b205cd10ca42b 100644 --- a/crates/search/src/buffer_search.rs +++ b/crates/search/src/buffer_search.rs @@ -114,81 +114,23 @@ impl Render for BufferSearchBar { .map(|splittable_editor| { let editor_ref = splittable_editor.read(cx); let diff_view_style = editor_ref.diff_view_style(); - let is_split = editor_ref.is_split(); + + let is_split_set = diff_view_style == DiffViewStyle::Split; + let is_split_active = editor_ref.is_split(); let min_columns = EditorSettings::get_global(cx).minimum_split_diff_width as u32; - let mut split_button = IconButton::new("diff-split", IconName::DiffSplit) - .shape(IconButtonShape::Square) - .tooltip(Tooltip::element(move |_, cx| { - let message = if min_columns == 0 { - SharedString::from("Split") - } else { - format!("Split when wider than {} columns", min_columns).into() - }; - - v_flex() - .child(message) - .child( - h_flex() - .gap_0p5() - .text_ui_sm(cx) - .text_color(Color::Muted.color(cx)) - .children(render_modifiers( - &gpui::Modifiers::secondary_key(), - PlatformStyle::platform(), - None, - Some(TextSize::Small.rems(cx).into()), - false, - )) - .child("click to change min width"), - ) - .into_any() - })) - .on_click({ - let splittable_editor = splittable_editor.downgrade(); - move |_, window, cx| { - if window.modifiers().secondary() { - window.dispatch_action( - OpenSettingsAt { - path: "minimum_split_diff_width".to_string(), - } - .boxed_clone(), - cx, - ); - } else { - 
update_settings_file( - ::global(cx), - cx, - |settings, _| { - settings.editor.diff_view_style = - Some(DiffViewStyle::Split); - }, - ); - if diff_view_style == DiffViewStyle::Unified { - splittable_editor - .update(cx, |editor, cx| { - editor.toggle_split(&ToggleSplitDiff, window, cx); - }) - .ok(); - } - } - } - }); - - if diff_view_style == DiffViewStyle::Split { - if !is_split { - split_button = split_button.icon_color(Color::Disabled) - } else { - split_button = split_button.toggle_state(true) - } - } + let split_icon = if is_split_set && !is_split_active { + IconName::DiffSplitAuto + } else { + IconName::DiffSplit + }; h_flex() .gap_1() .child( IconButton::new("diff-unified", IconName::DiffUnified) - .shape(IconButtonShape::Square) + .icon_size(IconSize::Small) .toggle_state(diff_view_style == DiffViewStyle::Unified) .tooltip(Tooltip::text("Unified")) .on_click({ @@ -216,7 +158,71 @@ impl Render for BufferSearchBar { } }), ) - .child(split_button) + .child( + IconButton::new("diff-split", split_icon) + .toggle_state(diff_view_style == DiffViewStyle::Split) + .icon_size(IconSize::Small) + .tooltip(Tooltip::element(move |_, cx| { + let message = if is_split_set && !is_split_active { + format!("Split when wider than {} columns", min_columns) + .into() + } else { + SharedString::from("Split") + }; + + v_flex() + .child(message) + .child( + h_flex() + .gap_0p5() + .text_ui_sm(cx) + .text_color(Color::Muted.color(cx)) + .children(render_modifiers( + &gpui::Modifiers::secondary_key(), + PlatformStyle::platform(), + None, + Some(TextSize::Small.rems(cx).into()), + false, + )) + .child("click to change min width"), + ) + .into_any() + })) + .on_click({ + let splittable_editor = splittable_editor.downgrade(); + move |_, window, cx| { + if window.modifiers().secondary() { + window.dispatch_action( + OpenSettingsAt { + path: "minimum_split_diff_width".to_string(), + } + .boxed_clone(), + cx, + ); + } else { + update_settings_file( + ::global(cx), + cx, + |settings, _| { 
+ settings.editor.diff_view_style = + Some(DiffViewStyle::Split); + }, + ); + if diff_view_style == DiffViewStyle::Unified { + splittable_editor + .update(cx, |editor, cx| { + editor.toggle_split( + &ToggleSplitDiff, + window, + cx, + ); + }) + .ok(); + } + } + } + }), + ) }) } else { None @@ -240,7 +246,7 @@ impl Render for BufferSearchBar { let collapse_expand_icon_button = |id| { IconButton::new(id, icon) - .shape(IconButtonShape::Square) + .icon_size(IconSize::Small) .tooltip(move |_, cx| { Tooltip::for_action_in( tooltip_label, @@ -285,6 +291,7 @@ impl Render for BufferSearchBar { regex, replacement, selection, + select_all, find_in_results, } = self.supported_options(cx); @@ -455,14 +462,16 @@ impl Render for BufferSearchBar { )) }); - el.child(render_action_button( - "buffer-search-nav-button", - IconName::SelectAll, - Default::default(), - "Select All Matches", - &SelectAllMatches, - query_focus, - )) + el.when(select_all, |el| { + el.child(render_action_button( + "buffer-search-nav-button", + IconName::SelectAll, + Default::default(), + "Select All Matches", + &SelectAllMatches, + query_focus.clone(), + )) + }) .child(matches_column) }) .when(find_in_results, |el| { @@ -3400,17 +3409,15 @@ mod tests { assert_eq!(initial_location, ToolbarItemLocation::Secondary); - let mut events = cx.events(&search_bar); + let mut events = cx.events::(&search_bar); search_bar.update_in(cx, |search_bar, window, cx| { search_bar.dismiss(&Dismiss, window, cx); }); assert_eq!( - events.try_next().unwrap(), - Some(ToolbarItemEvent::ChangeLocation( - ToolbarItemLocation::Hidden - )) + events.try_recv().unwrap(), + (ToolbarItemEvent::ChangeLocation(ToolbarItemLocation::Hidden)) ); search_bar.update_in(cx, |search_bar, window, cx| { @@ -3418,10 +3425,8 @@ mod tests { }); assert_eq!( - events.try_next().unwrap(), - Some(ToolbarItemEvent::ChangeLocation( - ToolbarItemLocation::Secondary - )) + events.try_recv().unwrap(), + 
(ToolbarItemEvent::ChangeLocation(ToolbarItemLocation::Secondary)) ); } @@ -3436,17 +3441,15 @@ mod tests { assert_eq!(initial_location, ToolbarItemLocation::PrimaryLeft); - let mut events = cx.events(&search_bar); + let mut events = cx.events::(&search_bar); search_bar.update_in(cx, |search_bar, window, cx| { search_bar.dismiss(&Dismiss, window, cx); }); assert_eq!( - events.try_next().unwrap(), - Some(ToolbarItemEvent::ChangeLocation( - ToolbarItemLocation::PrimaryLeft - )) + events.try_recv().unwrap(), + (ToolbarItemEvent::ChangeLocation(ToolbarItemLocation::PrimaryLeft)) ); search_bar.update_in(cx, |search_bar, window, cx| { @@ -3454,10 +3457,8 @@ mod tests { }); assert_eq!( - events.try_next().unwrap(), - Some(ToolbarItemEvent::ChangeLocation( - ToolbarItemLocation::PrimaryLeft - )) + events.try_recv().unwrap(), + (ToolbarItemEvent::ChangeLocation(ToolbarItemLocation::PrimaryLeft)) ); } @@ -3476,17 +3477,15 @@ mod tests { assert_eq!(initial_location, ToolbarItemLocation::Hidden); - let mut events = cx.events(&search_bar); + let mut events = cx.events::(&search_bar); search_bar.update_in(cx, |search_bar, window, cx| { search_bar.dismiss(&Dismiss, window, cx); }); assert_eq!( - events.try_next().unwrap(), - Some(ToolbarItemEvent::ChangeLocation( - ToolbarItemLocation::Hidden - )) + events.try_recv().unwrap(), + (ToolbarItemEvent::ChangeLocation(ToolbarItemLocation::Hidden)) ); search_bar.update_in(cx, |search_bar, window, cx| { @@ -3494,10 +3493,8 @@ mod tests { }); assert_eq!( - events.try_next().unwrap(), - Some(ToolbarItemEvent::ChangeLocation( - ToolbarItemLocation::Secondary - )) + events.try_recv().unwrap(), + (ToolbarItemEvent::ChangeLocation(ToolbarItemLocation::Secondary)) ); } @@ -3550,7 +3547,16 @@ mod tests { // Manually unfold one buffer (simulating a chevron click) let first_buffer_id = editor.read_with(cx, |editor, cx| { - editor.buffer().read(cx).excerpt_buffer_ids()[0] + editor + .buffer() + .read(cx) + .snapshot(cx) + .excerpts() + .nth(0) + 
.unwrap() + .context + .start + .buffer_id }); editor.update_in(cx, |editor, _window, cx| { editor.unfold_buffer(first_buffer_id, cx); @@ -3564,7 +3570,16 @@ mod tests { // Manually unfold the second buffer too let second_buffer_id = editor.read_with(cx, |editor, cx| { - editor.buffer().read(cx).excerpt_buffer_ids()[1] + editor + .buffer() + .read(cx) + .snapshot(cx) + .excerpts() + .nth(1) + .unwrap() + .context + .start + .buffer_id }); editor.update_in(cx, |editor, _window, cx| { editor.unfold_buffer(second_buffer_id, cx); diff --git a/crates/search/src/project_search.rs b/crates/search/src/project_search.rs index 991f8d1076a985e1413b0045aa42d424f094cd9c..1bccf1ae52fb2c52a8d01e53aabb1b3ff5c7c16f 100644 --- a/crates/search/src/project_search.rs +++ b/crates/search/src/project_search.rs @@ -11,8 +11,8 @@ use crate::{ use anyhow::Context as _; use collections::HashMap; use editor::{ - Anchor, Editor, EditorEvent, EditorSettings, ExcerptId, MAX_TAB_TITLE_LEN, MultiBuffer, - PathKey, SelectionEffects, + Anchor, Editor, EditorEvent, EditorSettings, MAX_TAB_TITLE_LEN, MultiBuffer, PathKey, + SelectionEffects, actions::{Backtab, FoldAll, SelectAll, Tab, UnfoldAll}, items::active_match_index, multibuffer_context_lines, @@ -342,41 +342,32 @@ impl ProjectSearch { } fn remove_deleted_buffers(&mut self, cx: &mut Context) { - let (deleted_paths, removed_excerpt_ids) = { - let excerpts = self.excerpts.read(cx); - let deleted_paths: Vec = excerpts - .paths() - .filter(|path| { - excerpts.buffer_for_path(path, cx).is_some_and(|buffer| { - buffer - .read(cx) - .file() - .is_some_and(|file| file.disk_state().is_deleted()) - }) - }) - .cloned() - .collect(); - - let removed_excerpt_ids: collections::HashSet = deleted_paths - .iter() - .flat_map(|path| excerpts.excerpts_for_path(path)) - .collect(); - - (deleted_paths, removed_excerpt_ids) - }; + let deleted_buffer_ids = self + .excerpts + .read(cx) + .all_buffers_iter() + .filter(|buffer| { + buffer + .read(cx) + .file() + 
.is_some_and(|file| file.disk_state().is_deleted()) + }) + .map(|buffer| buffer.read(cx).remote_id()) + .collect::>(); - if deleted_paths.is_empty() { + if deleted_buffer_ids.is_empty() { return; } - self.excerpts.update(cx, |excerpts, cx| { - for path in deleted_paths { - excerpts.remove_excerpts_for_path(path, cx); + let snapshot = self.excerpts.update(cx, |excerpts, cx| { + for buffer_id in deleted_buffer_ids { + excerpts.remove_excerpts_for_buffer(buffer_id, cx); } + excerpts.snapshot(cx) }); self.match_ranges - .retain(|range| !removed_excerpt_ids.contains(&range.start.excerpt_id)); + .retain(|range| snapshot.anchor_to_buffer_anchor(range.start).is_some()); cx.notify(); } @@ -2990,7 +2981,13 @@ pub mod tests { .read(cx) .buffer() .read(cx) - .excerpt_buffer_ids()[0] + .snapshot(cx) + .excerpts() + .next() + .unwrap() + .context + .start + .buffer_id }) .expect("should read buffer ids"); diff --git a/crates/settings/src/settings.rs b/crates/settings/src/settings.rs index 62b577c44520a6922798076cf085defea46d8688..1b75f9395e4f46ec5fd20231956d232c26005107 100644 --- a/crates/settings/src/settings.rs +++ b/crates/settings/src/settings.rs @@ -59,13 +59,13 @@ pub struct ActiveSettingsProfileName(pub String); impl Global for ActiveSettingsProfileName {} pub trait UserSettingsContentExt { - fn for_profile(&self, cx: &App) -> Option<&SettingsContent>; + fn for_profile(&self, cx: &App) -> Option<&SettingsProfile>; fn for_release_channel(&self) -> Option<&SettingsContent>; fn for_os(&self) -> Option<&SettingsContent>; } impl UserSettingsContentExt for UserSettingsContent { - fn for_profile(&self, cx: &App) -> Option<&SettingsContent> { + fn for_profile(&self, cx: &App) -> Option<&SettingsProfile> { let Some(active_profile) = cx.try_global::() else { return None; }; diff --git a/crates/settings/src/settings_store.rs b/crates/settings/src/settings_store.rs index f16f59390939171394684c9fc51e011a8f77a956..577ba43e1dd566d32eeec8993ec135633146b020 100644 --- 
a/crates/settings/src/settings_store.rs +++ b/crates/settings/src/settings_store.rs @@ -36,8 +36,8 @@ use crate::{ LanguageToSettingsMap, LspSettings, LspSettingsMap, SemanticTokenRules, ThemeName, UserSettingsContentExt, VsCodeSettings, WorktreeId, settings_content::{ - ExtensionsSettingsContent, ProjectSettingsContent, RootUserSettings, SettingsContent, - UserSettingsContent, merge_from::MergeFrom, + ExtensionsSettingsContent, ProfileBase, ProjectSettingsContent, RootUserSettings, + SettingsContent, UserSettingsContent, merge_from::MergeFrom, }, }; @@ -1210,10 +1210,19 @@ impl SettingsStore { merged.merge_from_option(self.extension_settings.as_deref()); merged.merge_from_option(self.global_settings.as_deref()); if let Some(user_settings) = self.user_settings.as_ref() { - merged.merge_from(&user_settings.content); - merged.merge_from_option(user_settings.for_release_channel()); - merged.merge_from_option(user_settings.for_os()); - merged.merge_from_option(user_settings.for_profile(cx)); + let active_profile = user_settings.for_profile(cx); + let should_merge_user_settings = + active_profile.is_none_or(|profile| profile.base == ProfileBase::User); + + if should_merge_user_settings { + merged.merge_from(&user_settings.content); + merged.merge_from_option(user_settings.for_release_channel()); + merged.merge_from_option(user_settings.for_os()); + } + + if let Some(profile) = active_profile { + merged.merge_from(&profile.settings); + } } merged.merge_from_option(self.server_settings.as_deref()); @@ -1431,9 +1440,7 @@ impl std::fmt::Display for InvalidSettingsError { | InvalidSettingsError::DefaultSettings { message } | InvalidSettingsError::Tasks { message, .. } | InvalidSettingsError::Editorconfig { message, .. } - | InvalidSettingsError::Debug { message, .. } => { - write!(f, "{message}") - } + | InvalidSettingsError::Debug { message, .. 
} => write!(f, "{message}"), } } } diff --git a/crates/settings/src/vscode_import.rs b/crates/settings/src/vscode_import.rs index c40b38c460a17f30b1fce26c50b40a893f7724a8..1211cbd8a4519ea295773eb0d979b48258908311 100644 --- a/crates/settings/src/vscode_import.rs +++ b/crates/settings/src/vscode_import.rs @@ -999,6 +999,7 @@ impl VsCodeSettings { } }), zoomed_padding: None, + focus_follows_mouse: None, } } diff --git a/crates/settings_content/src/agent.rs b/crates/settings_content/src/agent.rs index dae5c99b9ef9b5b3892b1201ff9a1686330dc365..5b1b3c014f8c538cb0dff506e05d84a80dc863d1 100644 --- a/crates/settings_content/src/agent.rs +++ b/crates/settings_content/src/agent.rs @@ -159,10 +159,10 @@ pub struct AgentSettingsContent { /// /// Default: "primary_screen" pub notify_when_agent_waiting: Option, - /// Whether to play a sound when the agent has either completed its response, or needs user input. + /// When to play a sound when the agent has either completed its response, or needs user input. /// - /// Default: false - pub play_sound_when_agent_done: Option, + /// Default: never + pub play_sound_when_agent_done: Option, /// Whether to display agent edits in single-file editors in addition to the review multibuffer pane. /// /// Default: true @@ -209,6 +209,11 @@ pub struct AgentSettingsContent { /// /// Default: false pub show_turn_stats: Option, + /// Whether to show the merge conflict indicator in the status bar + /// that offers to resolve conflicts using the agent. + /// + /// Default: true + pub show_merge_conflict_indicator: Option, /// Per-tool permission rules for granular control over which tool actions /// require confirmation. 
/// @@ -347,6 +352,37 @@ pub enum NotifyWhenAgentWaiting { Never, } +#[derive( + Copy, + Clone, + Default, + Debug, + Serialize, + Deserialize, + JsonSchema, + MergeFrom, + PartialEq, + strum::VariantArray, + strum::VariantNames, +)] +#[serde(rename_all = "snake_case")] +pub enum PlaySoundWhenAgentDone { + #[default] + Never, + WhenHidden, + Always, +} + +impl PlaySoundWhenAgentDone { + pub fn should_play(&self, visible: bool) -> bool { + match self { + PlaySoundWhenAgentDone::Never => false, + PlaySoundWhenAgentDone::WhenHidden => !visible, + PlaySoundWhenAgentDone::Always => true, + } + } +} + #[with_fallible_options] #[derive(Clone, Debug, Serialize, Deserialize, JsonSchema, MergeFrom, PartialEq)] pub struct LanguageModelSelection { diff --git a/crates/settings_content/src/language_model.rs b/crates/settings_content/src/language_model.rs index fef92dc8f43d52c160c1e8c8a2fb7aeb0533e2c0..4b72c2ad3f47d834dfa38555d80a8646e3940f51 100644 --- a/crates/settings_content/src/language_model.rs +++ b/crates/settings_content/src/language_model.rs @@ -278,6 +278,7 @@ pub struct OpenAiCompatibleAvailableModel { pub max_tokens: u64, pub max_output_tokens: Option, pub max_completion_tokens: Option, + pub reasoning_effort: Option, #[serde(default)] pub capabilities: OpenAiCompatibleModelCapabilities, } diff --git a/crates/settings_content/src/project.rs b/crates/settings_content/src/project.rs index 789f3786cb0d39444370d78e92d3d342773cafd5..6e8b296ef21efa838833038582de82b3ebc4f28b 100644 --- a/crates/settings_content/src/project.rs +++ b/crates/settings_content/src/project.rs @@ -276,6 +276,18 @@ pub struct SemanticTokenRule { pub font_style: Option, } +impl SemanticTokenRule { + pub fn no_style_defined(&self) -> bool { + self.style.is_empty() + && self.foreground_color.is_none() + && self.background_color.is_none() + && self.underline.is_none() + && self.strikethrough.is_none() + && self.font_weight.is_none() + && self.font_style.is_none() + } +} + #[derive(Copy, Clone, Debug, 
Serialize, Deserialize, PartialEq, JsonSchema, MergeFrom)] #[serde(untagged)] pub enum SemanticTokenColorOverride { diff --git a/crates/settings_content/src/settings_content.rs b/crates/settings_content/src/settings_content.rs index f8c64191dfe2602744e783f6d52484c45a7756d2..6c60a7010f7cfc5b4fadf9a8cc386fe6e3267abc 100644 --- a/crates/settings_content/src/settings_content.rs +++ b/crates/settings_content/src/settings_content.rs @@ -265,6 +265,35 @@ settings_overrides! { pub struct PlatformOverrides { macos, linux, windows } } +/// Determines what settings a profile starts from before applying its overrides. +#[derive( + Debug, Clone, Copy, Default, PartialEq, Eq, Serialize, Deserialize, JsonSchema, MergeFrom, +)] +#[serde(rename_all = "snake_case")] +pub enum ProfileBase { + /// Apply profile settings on top of the user's current settings. + #[default] + User, + /// Apply profile settings on top of Zed's default settings, ignoring user customizations. + Default, +} + +/// A named settings profile that can temporarily override settings. +#[with_fallible_options] +#[derive(Debug, Default, PartialEq, Clone, Serialize, Deserialize, JsonSchema, MergeFrom)] +pub struct SettingsProfile { + /// What base settings to start from before applying this profile's overrides. + /// + /// - `user`: Apply on top of user's settings (default) + /// - `default`: Apply on top of Zed's default settings, ignoring user customizations + #[serde(default)] + pub base: ProfileBase, + + /// The settings overrides for this profile. 
+ #[serde(default)] + pub settings: Box, +} + #[with_fallible_options] #[derive(Debug, Default, PartialEq, Clone, Serialize, Deserialize, JsonSchema, MergeFrom)] pub struct UserSettingsContent { @@ -278,7 +307,7 @@ pub struct UserSettingsContent { pub platform_overrides: PlatformOverrides, #[serde(default)] - pub profiles: IndexMap, + pub profiles: IndexMap, } pub struct ExtensionsSettingsContent { @@ -734,6 +763,7 @@ pub struct VimSettingsContent { pub toggle_relative_line_numbers: Option, pub use_system_clipboard: Option, pub use_smartcase_find: Option, + pub use_regex_search: Option, /// When enabled, the `:substitute` command replaces all matches in a line /// by default. The 'g' flag then toggles this behavior., pub gdefault: Option, diff --git a/crates/settings_content/src/workspace.rs b/crates/settings_content/src/workspace.rs index ef00a44790fd10b8c56278362a2f552a40f52cbb..0bae7c260f6607f2015f750e5bb9dec7cc26342d 100644 --- a/crates/settings_content/src/workspace.rs +++ b/crates/settings_content/src/workspace.rs @@ -122,6 +122,9 @@ pub struct WorkspaceSettingsContent { /// What draws window decorations/titlebar, the client application (Zed) or display server /// Default: client pub window_decorations: Option, + /// Whether the focused panel follows the mouse location + /// Default: false + pub focus_follows_mouse: Option, } #[with_fallible_options] @@ -928,3 +931,10 @@ impl DocumentSymbols { self == &Self::On } } + +#[with_fallible_options] +#[derive(Copy, Clone, PartialEq, Default, Serialize, Deserialize, JsonSchema, MergeFrom, Debug)] +pub struct FocusFollowsMouse { + pub enabled: Option, + pub debounce_ms: Option, +} diff --git a/crates/settings_profile_selector/src/settings_profile_selector.rs b/crates/settings_profile_selector/src/settings_profile_selector.rs index a948b603e04c43a6740853b7c37aebb2ba8d7ee9..c273a08ce7427880a02cb375561aaaade2607b83 100644 --- a/crates/settings_profile_selector/src/settings_profile_selector.rs +++ 
b/crates/settings_profile_selector/src/settings_profile_selector.rs @@ -291,7 +291,7 @@ mod tests { use zed_actions::settings_profile_selector; async fn init_test( - profiles_json: serde_json::Value, + user_settings_json: serde_json::Value, cx: &mut TestAppContext, ) -> (Entity, &mut VisualTestContext) { cx.update(|cx| { @@ -307,13 +307,8 @@ mod tests { cx.update(|cx| { SettingsStore::update_global(cx, |store, cx| { - let settings_json = json!({ - "buffer_font_size": 10.0, - "profiles": profiles_json, - }); - store - .set_user_settings(&settings_json.to_string(), cx) + .set_user_settings(&user_settings_json.to_string(), cx) .unwrap(); }); }); @@ -328,7 +323,6 @@ mod tests { cx.update(|_, cx| { assert!(!cx.has_global::()); - assert_eq!(ThemeSettings::get_global(cx).buffer_font_size(cx), px(10.0)); }); (workspace, cx) @@ -354,15 +348,22 @@ mod tests { let classroom_and_streaming_profile_name = "Classroom / Streaming".to_string(); let demo_videos_profile_name = "Demo Videos".to_string(); - let profiles_json = json!({ - classroom_and_streaming_profile_name.clone(): { - "buffer_font_size": 20.0, - }, - demo_videos_profile_name.clone(): { - "buffer_font_size": 15.0 + let user_settings_json = json!({ + "buffer_font_size": 10.0, + "profiles": { + classroom_and_streaming_profile_name.clone(): { + "settings": { + "buffer_font_size": 20.0, + } + }, + demo_videos_profile_name.clone(): { + "settings": { + "buffer_font_size": 15.0 + } + } } }); - let (workspace, cx) = init_test(profiles_json.clone(), cx).await; + let (workspace, cx) = init_test(user_settings_json, cx).await; cx.dispatch_action(settings_profile_selector::Toggle); let picker = active_settings_profile_picker(&workspace, cx); @@ -575,24 +576,134 @@ mod tests { }); } + #[gpui::test] + async fn test_settings_profile_with_user_base(cx: &mut TestAppContext) { + let user_settings_json = json!({ + "buffer_font_size": 10.0, + "profiles": { + "Explicit User": { + "base": "user", + "settings": { + "buffer_font_size": 20.0 + 
} + }, + "Implicit User": { + "settings": { + "buffer_font_size": 20.0 + } + } + } + }); + let (workspace, cx) = init_test(user_settings_json, cx).await; + + // Select "Explicit User" (index 1) — profile applies on top of user settings. + cx.dispatch_action(settings_profile_selector::Toggle); + let picker = active_settings_profile_picker(&workspace, cx); + cx.dispatch_action(SelectNext); + + picker.read_with(cx, |picker, cx| { + assert_eq!( + picker.delegate.selected_profile_name.as_deref(), + Some("Explicit User") + ); + assert_eq!(ThemeSettings::get_global(cx).buffer_font_size(cx), px(20.0)); + }); + + cx.dispatch_action(Confirm); + + // Select "Implicit User" (index 2) — no base specified, same behavior. + cx.dispatch_action(settings_profile_selector::Toggle); + let picker = active_settings_profile_picker(&workspace, cx); + cx.dispatch_action(SelectNext); + + picker.read_with(cx, |picker, cx| { + assert_eq!( + picker.delegate.selected_profile_name.as_deref(), + Some("Implicit User") + ); + assert_eq!(ThemeSettings::get_global(cx).buffer_font_size(cx), px(20.0)); + }); + + cx.dispatch_action(Confirm); + } + + #[gpui::test] + async fn test_settings_profile_with_default_base(cx: &mut TestAppContext) { + let user_settings_json = json!({ + "buffer_font_size": 10.0, + "profiles": { + "Clean Slate": { + "base": "default" + }, + "Custom on Defaults": { + "base": "default", + "settings": { + "buffer_font_size": 30.0 + } + } + } + }); + let (workspace, cx) = init_test(user_settings_json, cx).await; + + // User has buffer_font_size: 10, factory default is 15. + cx.update(|_, cx| { + assert_eq!(ThemeSettings::get_global(cx).buffer_font_size(cx), px(10.0)); + }); + + // "Clean Slate" has base: "default" with no settings overrides, + // so we get the factory default (15), not the user's value (10). 
+ cx.dispatch_action(settings_profile_selector::Toggle); + let picker = active_settings_profile_picker(&workspace, cx); + cx.dispatch_action(SelectNext); + + picker.read_with(cx, |picker, cx| { + assert_eq!( + picker.delegate.selected_profile_name.as_deref(), + Some("Clean Slate") + ); + assert_eq!(ThemeSettings::get_global(cx).buffer_font_size(cx), px(15.0)); + }); + + // "Custom on Defaults" has base: "default" with buffer_font_size: 30, + // so the profile's override (30) applies on top of the factory default, + // not on top of the user's value (10). + cx.dispatch_action(SelectNext); + + picker.read_with(cx, |picker, cx| { + assert_eq!( + picker.delegate.selected_profile_name.as_deref(), + Some("Custom on Defaults") + ); + assert_eq!(ThemeSettings::get_global(cx).buffer_font_size(cx), px(30.0)); + }); + + cx.dispatch_action(Confirm); + + cx.update(|_, cx| { + assert_eq!(ThemeSettings::get_global(cx).buffer_font_size(cx), px(30.0)); + }); + } + #[gpui::test] async fn test_settings_profile_selector_is_in_user_configuration_order( cx: &mut TestAppContext, ) { // Must be unique names (HashMap) - let profiles_json = json!({ - "z": {}, - "e": {}, - "d": {}, - " ": {}, - "r": {}, - "u": {}, - "l": {}, - "3": {}, - "s": {}, - "!": {}, + let user_settings_json = json!({ + "profiles": { + "z": { "settings": {} }, + "e": { "settings": {} }, + "d": { "settings": {} }, + " ": { "settings": {} }, + "r": { "settings": {} }, + "u": { "settings": {} }, + "l": { "settings": {} }, + "3": { "settings": {} }, + "s": { "settings": {} }, + "!": { "settings": {} }, + } }); - let (workspace, cx) = init_test(profiles_json.clone(), cx).await; + let (workspace, cx) = init_test(user_settings_json, cx).await; cx.dispatch_action(settings_profile_selector::Toggle); let picker = active_settings_profile_picker(&workspace, cx); diff --git a/crates/settings_ui/Cargo.toml b/crates/settings_ui/Cargo.toml index 9d79481596f4b4259760ff6c2f19f8f5cf709d1e..0228f6886fc741505ffbe02fe82242d5f3e1dfd4 100644 
--- a/crates/settings_ui/Cargo.toml +++ b/crates/settings_ui/Cargo.toml @@ -59,6 +59,7 @@ ui.workspace = true util.workspace = true workspace.workspace = true zed_actions.workspace = true +zed_credentials_provider.workspace = true [dev-dependencies] fs = { workspace = true, features = ["test-support"] } diff --git a/crates/settings_ui/src/page_data.rs b/crates/settings_ui/src/page_data.rs index b6d10424f4a6cf0710a916410e0e6068d80d6064..9978832c05bb29c97f118fccbe301214d81fa0c6 100644 --- a/crates/settings_ui/src/page_data.rs +++ b/crates/settings_ui/src/page_data.rs @@ -2447,7 +2447,7 @@ fn editor_page() -> SettingsPage { ] } - fn vim_settings_section() -> [SettingsPageItem; 12] { + fn vim_settings_section() -> [SettingsPageItem; 13] { [ SettingsPageItem::SectionHeader("Vim"), SettingsPageItem::SettingItem(SettingItem { @@ -2556,6 +2556,24 @@ fn editor_page() -> SettingsPage { metadata: None, files: USER, }), + SettingsPageItem::SettingItem(SettingItem { + title: "Regex Search", + description: "Use regex search by default in Vim search.", + field: Box::new(SettingField { + json_path: Some("vim.use_regex_search"), + pick: |settings_content| { + settings_content.vim.as_ref()?.use_regex_search.as_ref() + }, + write: |settings_content, value| { + settings_content + .vim + .get_or_insert_default() + .use_regex_search = value; + }, + }), + metadata: None, + files: USER, + }), SettingsPageItem::SettingItem(SettingItem { title: "Cursor Shape - Normal Mode", description: "Cursor shape for normal mode.", @@ -4159,7 +4177,7 @@ fn window_and_layout_page() -> SettingsPage { ] } - fn layout_section() -> [SettingsPageItem; 4] { + fn layout_section() -> [SettingsPageItem; 6] { [ SettingsPageItem::SectionHeader("Layout"), SettingsPageItem::SettingItem(SettingItem { @@ -4223,6 +4241,52 @@ fn window_and_layout_page() -> SettingsPage { }), metadata: None, }), + SettingsPageItem::SettingItem(SettingItem { + title: "Focus Follows Mouse", + description: "Whether to change focus to a pane 
when the mouse hovers over it.", + field: Box::new(SettingField { + json_path: Some("focus_follows_mouse.enabled"), + pick: |settings_content| { + settings_content + .workspace + .focus_follows_mouse + .as_ref() + .and_then(|s| s.enabled.as_ref()) + }, + write: |settings_content, value| { + settings_content + .workspace + .focus_follows_mouse + .get_or_insert_default() + .enabled = value; + }, + }), + metadata: None, + files: USER, + }), + SettingsPageItem::SettingItem(SettingItem { + title: "Focus Follows Mouse Debounce ms", + description: "Amount of time to wait before changing focus.", + field: Box::new(SettingField { + json_path: Some("focus_follows_mouse.debounce_ms"), + pick: |settings_content| { + settings_content + .workspace + .focus_follows_mouse + .as_ref() + .and_then(|s| s.debounce_ms.as_ref()) + }, + write: |settings_content, value| { + settings_content + .workspace + .focus_follows_mouse + .get_or_insert_default() + .debounce_ms = value; + }, + }), + metadata: None, + files: USER, + }), ] } @@ -4387,7 +4451,7 @@ fn window_and_layout_page() -> SettingsPage { } fn panels_page() -> SettingsPage { - fn project_panel_section() -> [SettingsPageItem; 24] { + fn project_panel_section() -> [SettingsPageItem; 28] { [ SettingsPageItem::SectionHeader("Project Panel"), SettingsPageItem::SettingItem(SettingItem { @@ -4868,31 +4932,25 @@ fn panels_page() -> SettingsPage { files: USER, }), SettingsPageItem::SettingItem(SettingItem { - title: "Hidden Files", - description: "Globs to match files that will be considered \"hidden\" and can be hidden from the project panel.", - field: Box::new( - SettingField { - json_path: Some("worktree.hidden_files"), - pick: |settings_content| { - settings_content.project.worktree.hidden_files.as_ref() - }, - write: |settings_content, value| { - settings_content.project.worktree.hidden_files = value; - }, - } - .unimplemented(), - ), + title: "Sort Mode", + description: "Sort order for entries in the project panel.", + field: 
Box::new(SettingField { + json_path: Some("project_panel.sort_mode"), + pick: |settings_content| { + settings_content.project_panel.as_ref()?.sort_mode.as_ref() + }, + write: |settings_content, value| { + settings_content + .project_panel + .get_or_insert_default() + .sort_mode = value; + }, + }), metadata: None, files: USER, }), - ] - } - - fn auto_open_files_section() -> [SettingsPageItem; 5] { - [ - SettingsPageItem::SectionHeader("Auto Open Files"), SettingsPageItem::SettingItem(SettingItem { - title: "On Create", + title: "Auto Open Files On Create", description: "Whether to automatically open newly created files in the editor.", field: Box::new(SettingField { json_path: Some("project_panel.auto_open.on_create"), @@ -4918,7 +4976,7 @@ fn panels_page() -> SettingsPage { files: USER, }), SettingsPageItem::SettingItem(SettingItem { - title: "On Paste", + title: "Auto Open Files On Paste", description: "Whether to automatically open files after pasting or duplicating them.", field: Box::new(SettingField { json_path: Some("project_panel.auto_open.on_paste"), @@ -4944,7 +5002,7 @@ fn panels_page() -> SettingsPage { files: USER, }), SettingsPageItem::SettingItem(SettingItem { - title: "On Drop", + title: "Auto Open Files On Drop", description: "Whether to automatically open files dropped from external sources.", field: Box::new(SettingField { json_path: Some("project_panel.auto_open.on_drop"), @@ -4970,27 +5028,27 @@ fn panels_page() -> SettingsPage { files: USER, }), SettingsPageItem::SettingItem(SettingItem { - title: "Sort Mode", - description: "Sort order for entries in the project panel.", - field: Box::new(SettingField { - pick: |settings_content| { - settings_content.project_panel.as_ref()?.sort_mode.as_ref() - }, - write: |settings_content, value| { - settings_content - .project_panel - .get_or_insert_default() - .sort_mode = value; - }, - json_path: Some("project_panel.sort_mode"), - }), + title: "Hidden Files", + description: "Globs to match files that will 
be considered \"hidden\" and can be hidden from the project panel.", + field: Box::new( + SettingField { + json_path: Some("worktree.hidden_files"), + pick: |settings_content| { + settings_content.project.worktree.hidden_files.as_ref() + }, + write: |settings_content, value| { + settings_content.project.worktree.hidden_files = value; + }, + } + .unimplemented(), + ), metadata: None, files: USER, }), ] } - fn terminal_panel_section() -> [SettingsPageItem; 3] { + fn terminal_panel_section() -> [SettingsPageItem; 4] { [ SettingsPageItem::SectionHeader("Terminal Panel"), SettingsPageItem::SettingItem(SettingItem { @@ -5006,6 +5064,19 @@ fn panels_page() -> SettingsPage { metadata: None, files: USER, }), + SettingsPageItem::SettingItem(SettingItem { + title: "Terminal Panel Flexible Sizing", + description: "Whether the terminal panel should use flexible (proportional) sizing when docked to the left or right.", + field: Box::new(SettingField { + json_path: Some("terminal.flexible"), + pick: |settings_content| settings_content.terminal.as_ref()?.flexible.as_ref(), + write: |settings_content, value| { + settings_content.terminal.get_or_insert_default().flexible = value; + }, + }), + metadata: None, + files: USER, + }), SettingsPageItem::SettingItem(SettingItem { title: "Show Count Badge", description: "Show a badge on the terminal panel icon with the count of open terminals.", @@ -5666,7 +5737,7 @@ fn panels_page() -> SettingsPage { ] } - fn agent_panel_section() -> [SettingsPageItem; 5] { + fn agent_panel_section() -> [SettingsPageItem; 6] { [ SettingsPageItem::SectionHeader("Agent Panel"), SettingsPageItem::SettingItem(SettingItem { @@ -5695,6 +5766,19 @@ fn panels_page() -> SettingsPage { metadata: None, files: USER, }), + SettingsPageItem::SettingItem(SettingItem { + title: "Agent Panel Flexible Sizing", + description: "Whether the agent panel should use flexible (proportional) sizing when docked to the left or right.", + field: Box::new(SettingField { + json_path: 
Some("agent.flexible"), + pick: |settings_content| settings_content.agent.as_ref()?.flexible.as_ref(), + write: |settings_content, value| { + settings_content.agent.get_or_insert_default().flexible = value; + }, + }), + metadata: None, + files: USER, + }), SettingsPageItem::SettingItem(SettingItem { title: "Agent Panel Default Width", description: "Default width when the agent panel is docked to the left or right.", @@ -5735,7 +5819,6 @@ fn panels_page() -> SettingsPage { title: "Panels", items: concat_sections![ project_panel_section(), - auto_open_files_section(), terminal_panel_section(), outline_panel_section(), git_panel_section(), @@ -7278,7 +7361,7 @@ fn ai_page(cx: &App) -> SettingsPage { }), SettingsPageItem::SettingItem(SettingItem { title: "Play Sound When Agent Done", - description: "Whether to play a sound when the agent has either completed its response, or needs user input.", + description: "When to play a sound when the agent has either completed its response, or needs user input.", field: Box::new(SettingField { json_path: Some("agent.play_sound_when_agent_done"), pick: |settings_content| { @@ -7444,6 +7527,24 @@ fn ai_page(cx: &App) -> SettingsPage { metadata: None, files: USER, }), + SettingsPageItem::SettingItem(SettingItem { + title: "Show Merge Conflict Indicator", + description: "Whether to show the merge conflict indicator in the status bar that offers to resolve conflicts using the agent.", + field: Box::new(SettingField { + json_path: Some("agent.show_merge_conflict_indicator"), + pick: |settings_content| { + settings_content.agent.as_ref()?.show_merge_conflict_indicator.as_ref() + }, + write: |settings_content, value| { + settings_content + .agent + .get_or_insert_default() + .show_merge_conflict_indicator = value; + }, + }), + metadata: None, + files: USER, + }), ]); items.into_boxed_slice() diff --git a/crates/settings_ui/src/pages/edit_prediction_provider_setup.rs b/crates/settings_ui/src/pages/edit_prediction_provider_setup.rs index 
193be67aad4760763637f116fad23066438b5b61..a2a457d33eb0788ff0bed981ce5666423890f05a 100644 --- a/crates/settings_ui/src/pages/edit_prediction_provider_setup.rs +++ b/crates/settings_ui/src/pages/edit_prediction_provider_setup.rs @@ -185,9 +185,15 @@ fn render_api_key_provider( cx: &mut Context, ) -> impl IntoElement { let weak_page = cx.weak_entity(); + let credentials_provider = zed_credentials_provider::global(cx); _ = window.use_keyed_state(current_url(cx), cx, |_, cx| { let task = api_key_state.update(cx, |key_state, cx| { - key_state.load_if_needed(current_url(cx), |state| state, cx) + key_state.load_if_needed( + current_url(cx), + |state| state, + credentials_provider.clone(), + cx, + ) }); cx.spawn(async move |_, cx| { task.await.ok(); @@ -208,10 +214,17 @@ fn render_api_key_provider( }); let write_key = move |api_key: Option, cx: &mut App| { + let credentials_provider = zed_credentials_provider::global(cx); api_key_state .update(cx, |key_state, cx| { let url = current_url(cx); - key_state.store(url, api_key, |key_state| key_state, cx) + key_state.store( + url, + api_key, + |key_state| key_state, + credentials_provider, + cx, + ) }) .detach_and_log_err(cx); }; diff --git a/crates/settings_ui/src/settings_ui.rs b/crates/settings_ui/src/settings_ui.rs index 89268b66f4c2f20411358eb63925187c6c3f382d..4c7a98f6c0fa94e659a6db4e00aa28e2b4516e13 100644 --- a/crates/settings_ui/src/settings_ui.rs +++ b/crates/settings_ui/src/settings_ui.rs @@ -500,18 +500,18 @@ fn init_renderers(cx: &mut App) { .add_basic_renderer::(render_dropdown) .add_basic_renderer::(render_dropdown) .add_basic_renderer::(render_dropdown) - .add_basic_renderer::(render_number_field) - .add_basic_renderer::(render_number_field) - .add_basic_renderer::(render_number_field) - .add_basic_renderer::(render_number_field) - .add_basic_renderer::>(render_number_field) - .add_basic_renderer::(render_number_field) - .add_basic_renderer::(render_number_field) - .add_basic_renderer::(render_number_field) - 
.add_basic_renderer::(render_number_field) - .add_basic_renderer::(render_number_field) - .add_basic_renderer::(render_number_field) - .add_basic_renderer::(render_number_field) + .add_basic_renderer::(render_editable_number_field) + .add_basic_renderer::(render_editable_number_field) + .add_basic_renderer::(render_editable_number_field) + .add_basic_renderer::(render_editable_number_field) + .add_basic_renderer::>(render_editable_number_field) + .add_basic_renderer::(render_editable_number_field) + .add_basic_renderer::(render_editable_number_field) + .add_basic_renderer::(render_editable_number_field) + .add_basic_renderer::(render_editable_number_field) + .add_basic_renderer::(render_editable_number_field) + .add_basic_renderer::(render_editable_number_field) + .add_basic_renderer::(render_editable_number_field) .add_basic_renderer::(render_dropdown) .add_basic_renderer::(render_dropdown) .add_basic_renderer::(render_dropdown) @@ -523,6 +523,7 @@ fn init_renderers(cx: &mut App) { .add_basic_renderer::(render_dropdown) .add_basic_renderer::(render_dropdown) .add_basic_renderer::(render_dropdown) + .add_basic_renderer::(render_dropdown) .add_basic_renderer::(render_dropdown) .add_basic_renderer::(render_dropdown) .add_basic_renderer::(render_dropdown) @@ -3752,7 +3753,6 @@ fn all_projects( .flat_map(|multi_workspace| { multi_workspace .workspaces() - .iter() .map(|workspace| workspace.read(cx).project().clone()) .collect::>() }), @@ -4050,41 +4050,6 @@ fn render_toggle_button + From + Copy>( .into_any_element() } -fn render_number_field( - field: SettingField, - file: SettingsUiFile, - _metadata: Option<&SettingsFieldMetadata>, - window: &mut Window, - cx: &mut App, -) -> AnyElement { - let (_, value) = SettingsStore::global(cx).get_value_from_file(file.to_settings(), field.pick); - let value = value.copied().unwrap_or_else(T::min_value); - - let id = field - .json_path - .map(|p| format!("numeric_stepper_{}", p)) - .unwrap_or_else(|| 
"numeric_stepper".to_string()); - - NumberField::new(id, value, window, cx) - .tab_index(0_isize) - .on_change({ - move |value, window, cx| { - let value = *value; - update_settings_file( - file.clone(), - field.json_path, - window, - cx, - move |settings, _cx| { - (field.write)(settings, Some(value)); - }, - ) - .log_err(); // todo(settings_ui) don't log err - } - }) - .into_any_element() -} - fn render_editable_number_field( field: SettingField, file: SettingsUiFile, diff --git a/crates/sidebar/Cargo.toml b/crates/sidebar/Cargo.toml index 04ed8808a14d4c6853b08669523d55a2ebba4482..d76fd139557dd10438d7cf98f9168d87dcae9804 100644 --- a/crates/sidebar/Cargo.toml +++ b/crates/sidebar/Cargo.toml @@ -23,7 +23,6 @@ agent_settings.workspace = true agent_ui = { workspace = true, features = ["audio"] } anyhow.workspace = true chrono.workspace = true -collections.workspace = true editor.workspace = true feature_flags.workspace = true fs.workspace = true diff --git a/crates/sidebar/src/project_group_builder.rs b/crates/sidebar/src/project_group_builder.rs deleted file mode 100644 index 221a68aea97736133f8563354d41dfc4495f59e8..0000000000000000000000000000000000000000 --- a/crates/sidebar/src/project_group_builder.rs +++ /dev/null @@ -1,420 +0,0 @@ -//! The sidebar groups threads by a canonical path list. -//! -//! Threads have a path list associated with them, but this is the absolute path -//! of whatever worktrees they were associated with. In the sidebar, we want to -//! group all threads by their main worktree, and then we add a worktree chip to -//! the sidebar entry when that thread is in another worktree. -//! -//! This module is provides the functions and structures necessary to do this -//! lookup and mapping. 
- -use collections::{HashMap, HashSet, vecmap::VecMap}; -use std::{ - path::{Component, Path, PathBuf}, - sync::Arc, -}; - -use gpui::{App, Entity}; -use ui::SharedString; -use workspace::{MultiWorkspace, PathList, Workspace}; - -/// Identifies a project group by a set of paths the workspaces in this group -/// have. -/// -/// Paths are mapped to their main worktree path first so we can group -/// workspaces by main repos. -#[derive(PartialEq, Eq, Hash, Clone)] -pub struct ProjectGroupName { - path_list: PathList, -} - -pub(crate) fn path_suffix(path: &Path, detail: usize) -> String { - let components: Vec<_> = path - .components() - .filter_map(|c| match c { - Component::Normal(s) => Some(s.to_string_lossy()), - _ => None, - }) - .collect(); - let start = components.len().saturating_sub(detail + 1); - components[start..].join("/") -} - -impl ProjectGroupName { - pub fn display_name_from_suffixes( - &self, - path_detail_map: &HashMap, - ) -> SharedString { - let mut names = Vec::with_capacity(self.path_list.paths().len()); - for abs_path in self.path_list.paths() { - let detail = path_detail_map.get(abs_path).copied().unwrap_or(0); - let suffix = path_suffix(abs_path, detail); - if !suffix.is_empty() { - names.push(suffix); - } - } - if names.is_empty() { - "Empty Workspace".into() - } else { - names.join(", ").into() - } - } - - pub fn path_list(&self) -> &PathList { - &self.path_list - } -} - -#[derive(Default)] -pub struct ProjectGroup { - pub workspaces: Vec>, - /// Root paths of all open workspaces in this group. Used to skip - /// redundant thread-store queries for linked worktrees that already - /// have an open workspace. 
- covered_paths: HashSet>, -} - -impl ProjectGroup { - fn add_workspace(&mut self, workspace: &Entity, cx: &App) { - if !self.workspaces.contains(workspace) { - self.workspaces.push(workspace.clone()); - } - for path in workspace.read(cx).root_paths(cx) { - self.covered_paths.insert(path); - } - } - - pub fn first_workspace(&self) -> &Entity { - self.workspaces - .first() - .expect("groups always have at least one workspace") - } - - pub fn main_workspace(&self, cx: &App) -> &Entity { - self.workspaces - .iter() - .find(|ws| { - !crate::root_repository_snapshots(ws, cx) - .any(|snapshot| snapshot.is_linked_worktree()) - }) - .unwrap_or_else(|| self.first_workspace()) - } -} - -pub struct ProjectGroupBuilder { - /// Maps git repositories' work_directory_abs_path to their original_repo_abs_path - directory_mappings: HashMap, - project_groups: VecMap, -} - -impl ProjectGroupBuilder { - fn new() -> Self { - Self { - directory_mappings: HashMap::default(), - project_groups: VecMap::new(), - } - } - - pub fn from_multiworkspace(mw: &MultiWorkspace, cx: &App) -> Self { - let mut builder = Self::new(); - // First pass: collect all directory mappings from every workspace - // so we know how to canonicalize any path (including linked - // worktree paths discovered by the main repo's workspace). - for workspace in mw.workspaces() { - builder.add_workspace_mappings(workspace.read(cx), cx); - } - - // Second pass: group each workspace using canonical paths derived - // from the full set of mappings. 
- for workspace in mw.workspaces() { - let group_name = builder.canonical_workspace_paths(workspace, cx); - builder - .project_group_entry(&group_name) - .add_workspace(workspace, cx); - } - builder - } - - fn project_group_entry(&mut self, name: &ProjectGroupName) -> &mut ProjectGroup { - self.project_groups.entry_ref(name).or_insert_default() - } - - fn add_mapping(&mut self, work_directory: &Path, original_repo: &Path) { - let old = self - .directory_mappings - .insert(PathBuf::from(work_directory), PathBuf::from(original_repo)); - if let Some(old) = old { - debug_assert_eq!( - &old, original_repo, - "all worktrees should map to the same main worktree" - ); - } - } - - pub fn add_workspace_mappings(&mut self, workspace: &Workspace, cx: &App) { - for repo in workspace.project().read(cx).repositories(cx).values() { - let snapshot = repo.read(cx).snapshot(); - - self.add_mapping( - &snapshot.work_directory_abs_path, - &snapshot.original_repo_abs_path, - ); - - for worktree in snapshot.linked_worktrees.iter() { - self.add_mapping(&worktree.path, &snapshot.original_repo_abs_path); - } - } - } - - /// Derives the canonical group name for a workspace by canonicalizing - /// each of its root paths using the builder's directory mappings. - fn canonical_workspace_paths( - &self, - workspace: &Entity, - cx: &App, - ) -> ProjectGroupName { - let root_paths = workspace.read(cx).root_paths(cx); - let paths: Vec<_> = root_paths - .iter() - .map(|p| self.canonicalize_path(p).to_path_buf()) - .collect(); - ProjectGroupName { - path_list: PathList::new(&paths), - } - } - - pub fn canonicalize_path<'a>(&'a self, path: &'a Path) -> &'a Path { - self.directory_mappings - .get(path) - .map(AsRef::as_ref) - .unwrap_or(path) - } - - /// Whether the given group should load threads for a linked worktree - /// at `worktree_path`. 
Returns `false` if the worktree already has an - /// open workspace in the group (its threads are loaded via the - /// workspace loop) or if the worktree's canonical path list doesn't - /// match `group_path_list`. - pub fn group_owns_worktree( - &self, - group: &ProjectGroup, - group_path_list: &PathList, - worktree_path: &Path, - ) -> bool { - if group.covered_paths.contains(worktree_path) { - return false; - } - let canonical = self.canonicalize_path_list(&PathList::new(&[worktree_path])); - canonical == *group_path_list - } - - /// Canonicalizes every path in a [`PathList`] using the builder's - /// directory mappings. - fn canonicalize_path_list(&self, path_list: &PathList) -> PathList { - let paths: Vec<_> = path_list - .paths() - .iter() - .map(|p| self.canonicalize_path(p).to_path_buf()) - .collect(); - PathList::new(&paths) - } - - pub fn groups(&self) -> impl Iterator { - self.project_groups.iter() - } -} - -#[cfg(test)] -mod tests { - use std::sync::Arc; - - use super::*; - use fs::FakeFs; - use gpui::TestAppContext; - use settings::SettingsStore; - - fn init_test(cx: &mut TestAppContext) { - cx.update(|cx| { - let settings_store = SettingsStore::test(cx); - cx.set_global(settings_store); - theme_settings::init(theme::LoadThemes::JustBase, cx); - }); - } - - async fn create_fs_with_main_and_worktree(cx: &mut TestAppContext) -> Arc { - let fs = FakeFs::new(cx.executor()); - fs.insert_tree( - "/project", - serde_json::json!({ - ".git": { - "worktrees": { - "feature-a": { - "commondir": "../../", - "HEAD": "ref: refs/heads/feature-a", - }, - }, - }, - "src": {}, - }), - ) - .await; - fs.insert_tree( - "/wt/feature-a", - serde_json::json!({ - ".git": "gitdir: /project/.git/worktrees/feature-a", - "src": {}, - }), - ) - .await; - fs.with_git_state(std::path::Path::new("/project/.git"), false, |state| { - state.worktrees.push(git::repository::Worktree { - path: std::path::PathBuf::from("/wt/feature-a"), - ref_name: Some("refs/heads/feature-a".into()), - sha: 
"abc".into(), - is_main: false, - }); - }) - .expect("git state should be set"); - fs - } - - #[gpui::test] - async fn test_main_repo_maps_to_itself(cx: &mut TestAppContext) { - init_test(cx); - let fs = create_fs_with_main_and_worktree(cx).await; - cx.update(|cx| ::set_global(fs.clone(), cx)); - - let project = project::Project::test(fs.clone(), ["/project".as_ref()], cx).await; - project - .update(cx, |project, cx| project.git_scans_complete(cx)) - .await; - - let (multi_workspace, cx) = cx.add_window_view(|window, cx| { - workspace::MultiWorkspace::test_new(project.clone(), window, cx) - }); - - multi_workspace.read_with(cx, |mw, cx| { - let mut canonicalizer = ProjectGroupBuilder::new(); - for workspace in mw.workspaces() { - canonicalizer.add_workspace_mappings(workspace.read(cx), cx); - } - - // The main repo path should canonicalize to itself. - assert_eq!( - canonicalizer.canonicalize_path(Path::new("/project")), - Path::new("/project"), - ); - - // An unknown path returns None. 
- assert_eq!( - canonicalizer.canonicalize_path(Path::new("/something/else")), - Path::new("/something/else"), - ); - }); - } - - fn group_name_from_paths(paths: &[&str]) -> ProjectGroupName { - ProjectGroupName { - path_list: PathList::new(paths), - } - } - - #[test] - fn test_path_suffix_detail_zero() { - assert_eq!(path_suffix(Path::new("/a/b/c"), 0), "c"); - } - - #[test] - fn test_path_suffix_detail_one() { - assert_eq!(path_suffix(Path::new("/a/b/c"), 1), "b/c"); - } - - #[test] - fn test_path_suffix_detail_two() { - assert_eq!(path_suffix(Path::new("/a/b/c"), 2), "a/b/c"); - } - - #[test] - fn test_path_suffix_clamped() { - let result = path_suffix(Path::new("/a/b"), 5); - assert_eq!(result, "a/b"); - } - - #[test] - fn test_display_name_from_suffixes_single_path() { - let name = group_name_from_paths(&["/code/zed"]); - let map = HashMap::default(); - assert_eq!(name.display_name_from_suffixes(&map).as_ref(), "zed"); - - let map = HashMap::from_iter([(PathBuf::from("/code/zed"), 1)]); - assert_eq!(name.display_name_from_suffixes(&map).as_ref(), "code/zed"); - } - - #[test] - fn test_display_name_from_suffixes_multiple_paths() { - let name = group_name_from_paths(&["/a/zed", "/b/bar"]); - - let map = HashMap::default(); - assert_eq!( - name.display_name_from_suffixes(&map).as_ref(), - "zed, bar", - "PathList sorts lexicographically, so /a/zed comes before /b/bar" - ); - - let map = HashMap::from_iter([(PathBuf::from("/a/zed"), 1), (PathBuf::from("/b/bar"), 0)]); - assert_eq!(name.display_name_from_suffixes(&map).as_ref(), "a/zed, bar"); - } - - #[test] - fn test_display_name_from_suffixes_empty() { - let name = group_name_from_paths(&[]); - let map = HashMap::default(); - assert_eq!( - name.display_name_from_suffixes(&map).as_ref(), - "Empty Workspace" - ); - } - - #[test] - fn test_display_name_from_suffixes_per_path_detail() { - let name = group_name_from_paths(&["/code/zed", "/code/bar/zed"]); - let map = HashMap::from_iter([ - (PathBuf::from("/code/zed"), 
1), - (PathBuf::from("/code/bar/zed"), 1), - ]); - assert_eq!( - name.display_name_from_suffixes(&map).as_ref(), - "bar/zed, code/zed", - ); - } - - #[gpui::test] - async fn test_worktree_checkout_canonicalizes_to_main_repo(cx: &mut TestAppContext) { - init_test(cx); - let fs = create_fs_with_main_and_worktree(cx).await; - cx.update(|cx| ::set_global(fs.clone(), cx)); - - // Open the worktree checkout as its own project. - let project = project::Project::test(fs.clone(), ["/wt/feature-a".as_ref()], cx).await; - project - .update(cx, |project, cx| project.git_scans_complete(cx)) - .await; - - let (multi_workspace, cx) = cx.add_window_view(|window, cx| { - workspace::MultiWorkspace::test_new(project.clone(), window, cx) - }); - - multi_workspace.read_with(cx, |mw, cx| { - let mut canonicalizer = ProjectGroupBuilder::new(); - for workspace in mw.workspaces() { - canonicalizer.add_workspace_mappings(workspace.read(cx), cx); - } - - // The worktree checkout path should canonicalize to the main repo. 
- assert_eq!( - canonicalizer.canonicalize_path(Path::new("/wt/feature-a")), - Path::new("/project"), - ); - }); - } -} diff --git a/crates/sidebar/src/sidebar.rs b/crates/sidebar/src/sidebar.rs index 616535b65a5f7ad6a9e622b0165fb67e93b7e28f..0f329274d5a1df284b7ab13831b15b2c7800d3f4 100644 --- a/crates/sidebar/src/sidebar.rs +++ b/crates/sidebar/src/sidebar.rs @@ -23,7 +23,9 @@ use gpui::{ use menu::{ Cancel, Confirm, SelectChild, SelectFirst, SelectLast, SelectNext, SelectParent, SelectPrevious, }; -use project::{AgentId, AgentRegistryStore, Event as ProjectEvent, linked_worktree_short_name}; +use project::{ + AgentId, AgentRegistryStore, Event as ProjectEvent, ProjectGroupKey, linked_worktree_short_name, +}; use recent_projects::sidebar_recent_projects::SidebarRecentProjects; use remote::RemoteConnectionOptions; use ui::utils::platform_title_bar_height; @@ -54,10 +56,6 @@ use zed_actions::agents_sidebar::{FocusSidebarFilter, ToggleThreadSwitcher}; use crate::thread_switcher::{ThreadSwitcher, ThreadSwitcherEntry, ThreadSwitcherEvent}; -use crate::project_group_builder::ProjectGroupBuilder; - -mod project_group_builder; - #[cfg(test)] mod sidebar_tests; @@ -136,13 +134,7 @@ impl ActiveEntry { (ActiveEntry::Thread { session_id, .. }, ListEntry::Thread(thread)) => { thread.metadata.session_id == *session_id } - ( - ActiveEntry::Draft(workspace), - ListEntry::NewThread { - workspace: entry_workspace, - .. - }, - ) => workspace == entry_workspace, + (ActiveEntry::Draft(_workspace), ListEntry::DraftThread { .. 
}) => true, _ => false, } } @@ -209,9 +201,8 @@ impl ThreadEntry { #[derive(Clone)] enum ListEntry { ProjectHeader { - path_list: PathList, + key: ProjectGroupKey, label: SharedString, - workspace: Entity, highlight_positions: Vec, has_running_threads: bool, waiting_thread_count: usize, @@ -219,30 +210,25 @@ enum ListEntry { }, Thread(ThreadEntry), ViewMore { - path_list: PathList, + key: ProjectGroupKey, is_fully_expanded: bool, }, + /// The user's active draft thread. Shows a prefix of the currently-typed + /// prompt, or "Untitled Thread" if the prompt is empty. + DraftThread { + worktrees: Vec, + }, + /// A convenience row for starting a new thread. Shown when a project group + /// has no threads, or when the active workspace contains linked worktrees + /// with no threads for that specific worktree set. NewThread { - path_list: PathList, - workspace: Entity, + key: project::ProjectGroupKey, worktrees: Vec, }, } #[cfg(test)] impl ListEntry { - fn workspace(&self) -> Option> { - match self { - ListEntry::ProjectHeader { workspace, .. } => Some(workspace.clone()), - ListEntry::Thread(thread_entry) => match &thread_entry.workspace { - ThreadEntryWorkspace::Open(workspace) => Some(workspace.clone()), - ThreadEntryWorkspace::Closed(_) => None, - }, - ListEntry::ViewMore { .. } => None, - ListEntry::NewThread { workspace, .. } => Some(workspace.clone()), - } - } - fn session_id(&self) -> Option<&acp::SessionId> { match self { ListEntry::Thread(thread_entry) => Some(&thread_entry.metadata.session_id), @@ -321,27 +307,32 @@ fn workspace_path_list(workspace: &Entity, cx: &App) -> PathList { /// Derives worktree display info from a thread's stored path list. /// -/// For each path in the thread's `folder_paths` that canonicalizes to a -/// different path (i.e. it's a git worktree), produces a [`WorktreeInfo`] -/// with the short worktree name and full path. +/// For each path in the thread's `folder_paths` that is not one of the +/// group's main paths (i.e. 
it's a git linked worktree), produces a +/// [`WorktreeInfo`] with the short worktree name and full path. fn worktree_info_from_thread_paths( folder_paths: &PathList, - project_groups: &ProjectGroupBuilder, + group_key: &project::ProjectGroupKey, ) -> Vec { + let main_paths = group_key.path_list().paths(); folder_paths .paths() .iter() .filter_map(|path| { - let canonical = project_groups.canonicalize_path(path); - if canonical != path.as_path() { - Some(WorktreeInfo { - name: linked_worktree_short_name(canonical, path).unwrap_or_default(), - full_path: SharedString::from(path.display().to_string()), - highlight_positions: Vec::new(), - }) - } else { - None + if main_paths.iter().any(|mp| mp.as_path() == path.as_path()) { + return None; } + // Find the main path whose file name matches this linked + // worktree's file name, falling back to the first main path. + let main_path = main_paths + .iter() + .find(|mp| mp.file_name() == path.file_name()) + .or(main_paths.first())?; + Some(WorktreeInfo { + name: linked_worktree_short_name(main_path, path).unwrap_or_default(), + full_path: SharedString::from(path.display().to_string()), + highlight_positions: Vec::new(), + }) }) .collect() } @@ -443,7 +434,7 @@ impl Sidebar { }) .detach(); - let workspaces = multi_workspace.read(cx).workspaces().to_vec(); + let workspaces: Vec<_> = multi_workspace.read(cx).workspaces().cloned().collect(); cx.defer_in(window, move |this, window, cx| { for workspace in &workspaces { this.subscribe_to_workspace(workspace, window, cx); @@ -677,10 +668,37 @@ impl Sidebar { result } + /// Finds an open workspace whose project group key matches the given path list. + fn workspace_for_group(&self, path_list: &PathList, cx: &App) -> Option> { + let mw = self.multi_workspace.upgrade()?; + let mw = mw.read(cx); + mw.workspaces() + .find(|ws| ws.read(cx).project_group_key(cx).path_list() == path_list) + .cloned() + } + + /// Opens a new workspace for a group that has no open workspaces. 
+ fn open_workspace_for_group( + &mut self, + path_list: &PathList, + window: &mut Window, + cx: &mut Context, + ) { + let Some(multi_workspace) = self.multi_workspace.upgrade() else { + return; + }; + + multi_workspace + .update(cx, |this, cx| { + this.find_or_create_local_workspace(path_list.clone(), window, cx) + }) + .detach_and_log_err(cx); + } + /// Rebuilds the sidebar contents from current workspace and thread state. /// - /// Uses [`ProjectGroupBuilder`] to group workspaces by their main git - /// repository, then populates thread entries from the metadata store and + /// Iterates [`MultiWorkspace::project_group_keys`] to determine project + /// groups, then populates thread entries from the metadata store and /// merges live thread info from active agent panels. /// /// Aim for a single forward pass over workspaces and threads plus an @@ -697,8 +715,8 @@ impl Sidebar { return; }; let mw = multi_workspace.read(cx); - let workspaces = mw.workspaces().to_vec(); - let active_workspace = mw.workspaces().get(mw.active_workspace_index()).cloned(); + let workspaces: Vec<_> = mw.workspaces().cloned().collect(); + let active_workspace = Some(mw.workspace().clone()); let agent_server_store = workspaces .first() @@ -709,19 +727,25 @@ impl Sidebar { // Derive active_entry from the active workspace's agent panel. // Draft is checked first because a conversation can have a session_id // before any messages are sent. However, a thread that's still loading - // also appears as a "draft" (no messages yet), so when we already have - // an eager Thread write for this workspace we preserve it. A session_id - // on a non-draft is a positive Thread signal. The remaining case - // (conversation exists, not draft, no session_id) is a genuine - // mid-load — keep the previous value. + // also appears as a "draft" (no messages yet). 
if let Some(active_ws) = &active_workspace { if let Some(panel) = active_ws.read(cx).panel::(cx) { if panel.read(cx).active_thread_is_draft(cx) || panel.read(cx).active_conversation_view().is_none() { + let conversation_parent_id = panel + .read(cx) + .active_conversation_view() + .and_then(|cv| cv.read(cx).parent_id(cx)); let preserving_thread = - matches!(&self.active_entry, Some(ActiveEntry::Thread { .. })) - && self.active_entry_workspace() == Some(active_ws); + if let Some(ActiveEntry::Thread { session_id, .. }) = &self.active_entry { + self.active_entry_workspace() == Some(active_ws) + && conversation_parent_id + .as_ref() + .is_some_and(|id| id == session_id) + } else { + false + }; if !preserving_thread { self.active_entry = Some(ActiveEntry::Draft(active_ws.clone())); } @@ -758,11 +782,6 @@ impl Sidebar { let mut current_session_ids: HashSet = HashSet::new(); let mut project_header_indices: Vec = Vec::new(); - // Use ProjectGroupBuilder to canonically group workspaces by their - // main git repository. This replaces the manual absorbed-workspace - // detection that was here before. 
- let project_groups = ProjectGroupBuilder::from_multiworkspace(mw, cx); - let has_open_projects = workspaces .iter() .any(|ws| !workspace_path_list(ws, cx).paths().is_empty()); @@ -779,51 +798,41 @@ impl Sidebar { (icon, icon_from_external_svg) }; - let groups: Vec<_> = project_groups.groups().collect(); + let groups: Vec<_> = mw.project_groups(cx).collect(); let all_paths: Vec = groups .iter() - .flat_map(|(name, _)| name.path_list().paths().iter().cloned()) + .flat_map(|(key, _)| key.path_list().paths().iter().cloned()) .collect(); let path_details = util::disambiguate::compute_disambiguation_details(&all_paths, |path, detail| { - crate::project_group_builder::path_suffix(path, detail) + project::path_suffix(path, detail) }); - let path_detail_map: collections::HashMap = + let path_detail_map: HashMap = all_paths.into_iter().zip(path_details).collect(); - for (group_name, group) in &groups { - let path_list = group_name.path_list().clone(); + for (group_key, group_workspaces) in &groups { + let path_list = group_key.path_list().clone(); if path_list.paths().is_empty() { continue; } - let label = group_name.display_name_from_suffixes(&path_detail_map); + let label = group_key.display_name_from_suffixes(&path_detail_map); let is_collapsed = self.collapsed_groups.contains(&path_list); let should_load_threads = !is_collapsed || !query.is_empty(); let is_active = active_workspace .as_ref() - .is_some_and(|active| group.workspaces.contains(active)); - - // Pick a representative workspace for the group: prefer the active - // workspace if it belongs to this group, otherwise use the main - // repo workspace (not a linked worktree). - let representative_workspace = active_workspace - .as_ref() - .filter(|_| is_active) - .unwrap_or_else(|| group.main_workspace(cx)); + .is_some_and(|active| group_workspaces.contains(active)); // Collect live thread infos from all workspaces in this group. 
- let live_infos: Vec<_> = group - .workspaces + let live_infos: Vec<_> = group_workspaces .iter() .flat_map(|ws| all_thread_infos_for_workspace(ws, cx)) .collect(); let mut threads: Vec = Vec::new(); - let mut threadless_workspaces: Vec<(Entity, Vec)> = Vec::new(); let mut has_running_threads = false; let mut waiting_thread_count: usize = 0; @@ -831,61 +840,88 @@ impl Sidebar { let mut seen_session_ids: HashSet = HashSet::new(); let thread_store = ThreadMetadataStore::global(cx); - // Load threads from each workspace in the group. - for workspace in &group.workspaces { - let ws_path_list = workspace_path_list(workspace, cx); - let mut workspace_rows = thread_store - .read(cx) - .entries_for_path(&ws_path_list) - .cloned() - .peekable(); - if workspace_rows.peek().is_none() { - let worktrees = - worktree_info_from_thread_paths(&ws_path_list, &project_groups); - threadless_workspaces.push((workspace.clone(), worktrees)); + // Build a lookup from workspace root paths to their workspace + // entity, used to assign ThreadEntryWorkspace::Open for threads + // whose folder_paths match an open workspace. + let workspace_by_path_list: HashMap> = + group_workspaces + .iter() + .map(|ws| (workspace_path_list(ws, cx), ws)) + .collect(); + + // Resolve a ThreadEntryWorkspace for a thread row. If any open + // workspace's root paths match the thread's folder_paths, use + // Open; otherwise use Closed. + let resolve_workspace = |row: &ThreadMetadata| -> ThreadEntryWorkspace { + workspace_by_path_list + .get(&row.folder_paths) + .map(|ws| ThreadEntryWorkspace::Open((*ws).clone())) + .unwrap_or_else(|| ThreadEntryWorkspace::Closed(row.folder_paths.clone())) + }; + + // Build a ThreadEntry from a metadata row. 
+ let make_thread_entry = |row: ThreadMetadata, + workspace: ThreadEntryWorkspace| + -> ThreadEntry { + let (icon, icon_from_external_svg) = resolve_agent_icon(&row.agent_id); + let worktrees = worktree_info_from_thread_paths(&row.folder_paths, &group_key); + ThreadEntry { + metadata: row, + icon, + icon_from_external_svg, + status: AgentThreadStatus::default(), + workspace, + is_live: false, + is_background: false, + is_title_generating: false, + highlight_positions: Vec::new(), + worktrees, + diff_stats: DiffStats::default(), } - for row in workspace_rows { - if !seen_session_ids.insert(row.session_id.clone()) { - continue; - } - let (icon, icon_from_external_svg) = resolve_agent_icon(&row.agent_id); - let worktrees = - worktree_info_from_thread_paths(&row.folder_paths, &project_groups); - threads.push(ThreadEntry { - metadata: row, - icon, - icon_from_external_svg, - status: AgentThreadStatus::default(), - workspace: ThreadEntryWorkspace::Open(workspace.clone()), - is_live: false, - is_background: false, - is_title_generating: false, - highlight_positions: Vec::new(), - worktrees, - diff_stats: DiffStats::default(), - }); + }; + + // === Main code path: one query per group via main_worktree_paths === + // The main_worktree_paths column is set on all new threads and + // points to the group's canonical paths regardless of which + // linked worktree the thread was opened in. + for row in thread_store + .read(cx) + .entries_for_main_worktree_path(&path_list) + .cloned() + { + if !seen_session_ids.insert(row.session_id.clone()) { + continue; } + let workspace = resolve_workspace(&row); + threads.push(make_thread_entry(row, workspace)); } - // Load threads from linked git worktrees whose - // canonical paths belong to this group. 
- let linked_worktree_queries = group - .workspaces - .iter() - .flat_map(|ws| root_repository_snapshots(ws, cx)) - .filter(|snapshot| !snapshot.is_linked_worktree()) - .flat_map(|snapshot| { - snapshot - .linked_worktrees() - .iter() - .filter(|wt| { - project_groups.group_owns_worktree(group, &path_list, &wt.path) - }) - .map(|wt| PathList::new(std::slice::from_ref(&wt.path))) - .collect::>() - }); + // Legacy threads did not have `main_worktree_paths` populated, so they + // must be queried by their `folder_paths`. + + // Load any legacy threads for the main worktrees of this project group. + for row in thread_store.read(cx).entries_for_path(&path_list).cloned() { + if !seen_session_ids.insert(row.session_id.clone()) { + continue; + } + let workspace = resolve_workspace(&row); + threads.push(make_thread_entry(row, workspace)); + } - for worktree_path_list in linked_worktree_queries { + // Load any legacy threads for any single linked wortree of this project group. + let mut linked_worktree_paths = HashSet::new(); + for workspace in group_workspaces { + if workspace.read(cx).visible_worktrees(cx).count() != 1 { + continue; + } + for snapshot in root_repository_snapshots(workspace, cx) { + for linked_worktree in snapshot.linked_worktrees() { + linked_worktree_paths.insert(linked_worktree.path.clone()); + } + } + } + for path in linked_worktree_paths { + let worktree_path_list = PathList::new(std::slice::from_ref(&path)); for row in thread_store .read(cx) .entries_for_path(&worktree_path_list) @@ -894,22 +930,10 @@ impl Sidebar { if !seen_session_ids.insert(row.session_id.clone()) { continue; } - let (icon, icon_from_external_svg) = resolve_agent_icon(&row.agent_id); - let worktrees = - worktree_info_from_thread_paths(&row.folder_paths, &project_groups); - threads.push(ThreadEntry { - metadata: row, - icon, - icon_from_external_svg, - status: AgentThreadStatus::default(), - workspace: ThreadEntryWorkspace::Closed(worktree_path_list.clone()), - is_live: false, - 
is_background: false, - is_title_generating: false, - highlight_positions: Vec::new(), - worktrees, - diff_stats: DiffStats::default(), - }); + threads.push(make_thread_entry( + row, + ThreadEntryWorkspace::Closed(worktree_path_list.clone()), + )); } } @@ -936,21 +960,21 @@ impl Sidebar { let session_id = &thread.metadata.session_id; - let is_thread_workspace_active = match &thread.workspace { - ThreadEntryWorkspace::Open(thread_workspace) => active_workspace - .as_ref() - .is_some_and(|active| active == thread_workspace), - ThreadEntryWorkspace::Closed(_) => false, - }; + let is_active_thread = self.active_entry.as_ref().is_some_and(|entry| { + entry.is_active_thread(session_id) + && active_workspace + .as_ref() + .is_some_and(|active| active == entry.workspace()) + }); if thread.status == AgentThreadStatus::Completed - && !is_thread_workspace_active + && !is_active_thread && old_statuses.get(session_id) == Some(&AgentThreadStatus::Running) { notified_threads.insert(session_id.clone()); } - if is_thread_workspace_active && !thread.is_background { + if is_active_thread && !thread.is_background { notified_threads.remove(session_id); } } @@ -1013,9 +1037,8 @@ impl Sidebar { project_header_indices.push(entries.len()); entries.push(ListEntry::ProjectHeader { - path_list: path_list.clone(), + key: group_key.clone(), label, - workspace: representative_workspace.clone(), highlight_positions: workspace_highlight_positions, has_running_threads, waiting_thread_count, @@ -1027,15 +1050,13 @@ impl Sidebar { entries.push(thread.into()); } } else { - let is_draft_for_workspace = is_active - && matches!(&self.active_entry, Some(ActiveEntry::Draft(_))) - && self.active_entry_workspace() == Some(representative_workspace); + let is_draft_for_group = is_active + && matches!(&self.active_entry, Some(ActiveEntry::Draft(ws)) if group_workspaces.contains(ws)); project_header_indices.push(entries.len()); entries.push(ListEntry::ProjectHeader { - path_list: path_list.clone(), + key: 
group_key.clone(), label, - workspace: representative_workspace.clone(), highlight_positions: Vec::new(), has_running_threads, waiting_thread_count, @@ -1046,25 +1067,61 @@ impl Sidebar { continue; } - // Emit "New Thread" entries for threadless workspaces - // and active drafts, right after the header. - for (workspace, worktrees) in &threadless_workspaces { - entries.push(ListEntry::NewThread { - path_list: path_list.clone(), - workspace: workspace.clone(), - worktrees: worktrees.clone(), - }); + // Emit a DraftThread entry when the active draft belongs to this group. + if is_draft_for_group { + if let Some(ActiveEntry::Draft(draft_ws)) = &self.active_entry { + let ws_path_list = workspace_path_list(draft_ws, cx); + let worktrees = worktree_info_from_thread_paths(&ws_path_list, &group_key); + entries.push(ListEntry::DraftThread { worktrees }); + } } - if is_draft_for_workspace - && !threadless_workspaces - .iter() - .any(|(ws, _)| ws == representative_workspace) + + // Emit a NewThread entry when: + // 1. The group has zero threads (convenient affordance). + // 2. The active workspace has linked worktrees but no threads + // for the active workspace's specific set of worktrees. 
+ let group_has_no_threads = threads.is_empty() && !group_workspaces.is_empty(); + let active_ws_has_threadless_linked_worktrees = is_active + && !is_draft_for_group + && active_workspace.as_ref().is_some_and(|active_ws| { + let ws_path_list = workspace_path_list(active_ws, cx); + let has_linked_worktrees = + !worktree_info_from_thread_paths(&ws_path_list, &group_key).is_empty(); + if !has_linked_worktrees { + return false; + } + let thread_store = ThreadMetadataStore::global(cx); + let has_threads_for_ws = thread_store + .read(cx) + .entries_for_path(&ws_path_list) + .next() + .is_some() + || thread_store + .read(cx) + .entries_for_main_worktree_path(&ws_path_list) + .next() + .is_some(); + !has_threads_for_ws + }); + + if !is_draft_for_group + && (group_has_no_threads || active_ws_has_threadless_linked_worktrees) { - let ws_path_list = workspace_path_list(representative_workspace, cx); - let worktrees = worktree_info_from_thread_paths(&ws_path_list, &project_groups); + let worktrees = if active_ws_has_threadless_linked_worktrees { + active_workspace + .as_ref() + .map(|ws| { + worktree_info_from_thread_paths( + &workspace_path_list(ws, cx), + &group_key, + ) + }) + .unwrap_or_default() + } else { + Vec::new() + }; entries.push(ListEntry::NewThread { - path_list: path_list.clone(), - workspace: representative_workspace.clone(), + key: group_key.clone(), worktrees, }); } @@ -1110,7 +1167,7 @@ impl Sidebar { if total > DEFAULT_THREADS_SHOWN { entries.push(ListEntry::ViewMore { - path_list: path_list.clone(), + key: group_key.clone(), is_fully_expanded, }); } @@ -1198,9 +1255,8 @@ impl Sidebar { let rendered = match entry { ListEntry::ProjectHeader { - path_list, + key, label, - workspace, highlight_positions, has_running_threads, waiting_thread_count, @@ -1208,9 +1264,8 @@ impl Sidebar { } => self.render_project_header( ix, false, - path_list, + key, label, - workspace, highlight_positions, *has_running_threads, *waiting_thread_count, @@ -1220,29 +1275,22 @@ impl 
Sidebar { ), ListEntry::Thread(thread) => self.render_thread(ix, thread, is_active, is_selected, cx), ListEntry::ViewMore { - path_list, + key, is_fully_expanded, - } => self.render_view_more(ix, path_list, *is_fully_expanded, is_selected, cx), - ListEntry::NewThread { - path_list, - workspace, - worktrees, - } => self.render_new_thread( - ix, - path_list, - workspace, - is_active, - worktrees, - is_selected, - cx, - ), + } => self.render_view_more(ix, key.path_list(), *is_fully_expanded, is_selected, cx), + ListEntry::DraftThread { worktrees, .. } => { + self.render_draft_thread(ix, is_active, worktrees, is_selected, cx) + } + ListEntry::NewThread { key, worktrees, .. } => { + self.render_new_thread(ix, key, worktrees, is_selected, cx) + } }; if is_group_header_after_first { v_flex() .w_full() .border_t_1() - .border_color(cx.theme().colors().border.opacity(0.5)) + .border_color(cx.theme().colors().border) .child(rendered) .into_any_element() } else { @@ -1253,13 +1301,9 @@ impl Sidebar { fn render_remote_project_icon( &self, ix: usize, - workspace: &Entity, - cx: &mut Context, + host: Option<&RemoteConnectionOptions>, ) -> Option { - let project = workspace.read(cx).project().read(cx); - let remote_connection_options = project.remote_connection_options(cx)?; - - let remote_icon_per_type = match remote_connection_options { + let remote_icon_per_type = match host? 
{ RemoteConnectionOptions::Wsl(_) => IconName::Linux, RemoteConnectionOptions::Docker(_) => IconName::Box, _ => IconName::Server, @@ -1282,16 +1326,18 @@ impl Sidebar { &self, ix: usize, is_sticky: bool, - path_list: &PathList, + key: &ProjectGroupKey, label: &SharedString, - workspace: &Entity, highlight_positions: &[usize], has_running_threads: bool, waiting_thread_count: usize, is_active: bool, - is_selected: bool, + is_focused: bool, cx: &mut Context, ) -> AnyElement { + let path_list = key.path_list(); + let host = key.host(); + let id_prefix = if is_sticky { "sticky-" } else { "" }; let id = SharedString::from(format!("{id_prefix}project-header-{ix}")); let disclosure_id = SharedString::from(format!("disclosure-{ix}")); @@ -1304,16 +1350,15 @@ impl Sidebar { (IconName::ChevronDown, "Collapse Project") }; - let has_new_thread_entry = self - .contents - .entries - .get(ix + 1) - .is_some_and(|entry| matches!(entry, ListEntry::NewThread { .. })); + let has_new_thread_entry = self.contents.entries.get(ix + 1).is_some_and(|entry| { + matches!( + entry, + ListEntry::NewThread { .. } | ListEntry::DraftThread { .. 
} + ) + }); let show_new_thread_button = !has_new_thread_entry && !self.has_filter_query(cx); - let workspace_for_remove = workspace.clone(); - let workspace_for_menu = workspace.clone(); - let workspace_for_open = workspace.clone(); + let workspace = self.workspace_for_group(path_list, cx); let path_list_for_toggle = path_list.clone(); let path_list_for_collapse = path_list.clone(); @@ -1321,11 +1366,11 @@ impl Sidebar { let label = if highlight_positions.is_empty() { Label::new(label.clone()) - .color(Color::Muted) + .when(!is_active, |this| this.color(Color::Muted)) .into_any_element() } else { HighlightedLabel::new(label.clone(), highlight_positions.to_vec()) - .color(Color::Muted) + .when(!is_active, |this| this.color(Color::Muted)) .into_any_element() }; @@ -1343,14 +1388,13 @@ impl Sidebar { .pr_1p5() .border_1() .map(|this| { - if is_selected { + if is_focused { this.border_color(color.border_focused) } else { this.border_color(gpui::transparent_black()) } }) .justify_between() - .hover(|s| s.bg(hover_color)) .child( h_flex() .when(!is_active, |this| this.cursor_pointer()) @@ -1371,7 +1415,7 @@ impl Sidebar { ) .child(label) .when_some( - self.render_remote_project_icon(ix, workspace, cx), + self.render_remote_project_icon(ix, host.as_ref()), |this, icon| this.child(icon), ) .when(is_collapsed, |this| { @@ -1404,10 +1448,7 @@ impl Sidebar { }) }), ) - .child({ - let workspace_for_new_thread = workspace.clone(); - let path_list_for_new_thread = path_list.clone(); - + .child( h_flex() .when(self.project_header_menu_ix != Some(ix), |this| { this.visible_on_hover(group_name) @@ -1415,13 +1456,7 @@ impl Sidebar { .on_mouse_down(gpui::MouseButton::Left, |_, _, cx| { cx.stop_propagation(); }) - .child(self.render_project_header_menu( - ix, - id_prefix, - &workspace_for_menu, - &workspace_for_remove, - cx, - )) + .child(self.render_project_header_menu(ix, id_prefix, key, cx)) .when(view_more_expanded && !is_collapsed, |this| { this.child( IconButton::new( @@ 
-1431,7 +1466,6 @@ impl Sidebar { IconName::ListCollapse, ) .icon_size(IconSize::Small) - .icon_color(Color::Muted) .tooltip(Tooltip::text("Collapse Displayed Threads")) .on_click(cx.listener({ let path_list_for_collapse = path_list_for_collapse.clone(); @@ -1444,51 +1478,50 @@ impl Sidebar { })), ) }) - .when(show_new_thread_button, |this| { - this.child( - IconButton::new( - SharedString::from(format!( - "{id_prefix}project-header-new-thread-{ix}", + .when_some( + workspace.filter(|_| show_new_thread_button), + |this, workspace| { + let path_list = path_list.clone(); + this.child( + IconButton::new( + SharedString::from(format!( + "{id_prefix}project-header-new-thread-{ix}", + )), + IconName::Plus, + ) + .icon_size(IconSize::Small) + .tooltip(Tooltip::text("New Thread")) + .on_click(cx.listener( + move |this, _, window, cx| { + this.collapsed_groups.remove(&path_list); + this.selection = None; + this.create_new_thread(&workspace, window, cx); + }, )), - IconName::Plus, ) - .icon_size(IconSize::Small) - .icon_color(Color::Muted) - .tooltip(Tooltip::text("New Thread")) - .on_click(cx.listener({ - let workspace_for_new_thread = workspace_for_new_thread.clone(); - let path_list_for_new_thread = path_list_for_new_thread.clone(); - move |this, _, window, cx| { - // Uncollapse the group if collapsed so - // the new-thread entry becomes visible. 
- this.collapsed_groups.remove(&path_list_for_new_thread); - this.selection = None; - this.create_new_thread(&workspace_for_new_thread, window, cx); - } - })), - ) - }) - }) + }, + ), + ) .when(!is_active, |this| { - this.tooltip(Tooltip::text("Activate Workspace")) - .on_click(cx.listener({ - move |this, _, window, cx| { - this.active_entry = - Some(ActiveEntry::Draft(workspace_for_open.clone())); + let path_list = path_list.clone(); + this.cursor_pointer() + .hover(|s| s.bg(hover_color)) + .tooltip(Tooltip::text("Open Workspace")) + .on_click(cx.listener(move |this, _, window, cx| { + if let Some(workspace) = this.workspace_for_group(&path_list, cx) { + this.active_entry = Some(ActiveEntry::Draft(workspace.clone())); if let Some(multi_workspace) = this.multi_workspace.upgrade() { multi_workspace.update(cx, |multi_workspace, cx| { - multi_workspace.activate( - workspace_for_open.clone(), - window, - cx, - ); + multi_workspace.activate(workspace.clone(), window, cx); }); } - if AgentPanel::is_visible(&workspace_for_open, cx) { - workspace_for_open.update(cx, |workspace, cx| { + if AgentPanel::is_visible(&workspace, cx) { + workspace.update(cx, |workspace, cx| { workspace.focus_panel::(window, cx); }); } + } else { + this.open_workspace_for_group(&path_list, window, cx); } })) }) @@ -1499,14 +1532,12 @@ impl Sidebar { &self, ix: usize, id_prefix: &str, - workspace: &Entity, - workspace_for_remove: &Entity, + project_group_key: &ProjectGroupKey, cx: &mut Context, ) -> impl IntoElement { - let workspace_for_menu = workspace.clone(); - let workspace_for_remove = workspace_for_remove.clone(); let multi_workspace = self.multi_workspace.clone(); let this = cx.weak_entity(); + let project_group_key = project_group_key.clone(); PopoverMenu::new(format!("{id_prefix}project-header-menu-{ix}")) .on_open(Rc::new({ @@ -1520,116 +1551,102 @@ impl Sidebar { } })) .menu(move |window, cx| { - let workspace = workspace_for_menu.clone(); - let workspace_for_remove = 
workspace_for_remove.clone(); let multi_workspace = multi_workspace.clone(); + let project_group_key = project_group_key.clone(); let menu = ContextMenu::build_persistent(window, cx, move |menu, _window, cx| { - let worktrees: Vec<_> = workspace - .read(cx) - .visible_worktrees(cx) - .map(|worktree| { - let worktree_read = worktree.read(cx); - let id = worktree_read.id(); - let name: SharedString = - worktree_read.root_name().as_unix_str().to_string().into(); - (id, name) - }) - .collect(); - - let worktree_count = worktrees.len(); - let mut menu = menu .header("Project Folders") .end_slot_action(Box::new(menu::EndSlot)); - for (worktree_id, name) in &worktrees { - let worktree_id = *worktree_id; - let workspace_for_worktree = workspace.clone(); - let workspace_for_remove_worktree = workspace_for_remove.clone(); - let multi_workspace_for_worktree = multi_workspace.clone(); - - let remove_handler = move |window: &mut Window, cx: &mut App| { - if worktree_count <= 1 { - if let Some(mw) = multi_workspace_for_worktree.upgrade() { - let ws = workspace_for_remove_worktree.clone(); - mw.update(cx, |multi_workspace, cx| { - multi_workspace.remove(&ws, window, cx); - }); - } - } else { - workspace_for_worktree.update(cx, |workspace, cx| { - workspace.project().update(cx, |project, cx| { - project.remove_worktree(worktree_id, cx); - }); - }); - } + for path in project_group_key.path_list().paths() { + let Some(name) = path.file_name() else { + continue; }; - + let name: SharedString = name.to_string_lossy().into_owned().into(); + let path = path.clone(); + let project_group_key = project_group_key.clone(); + let multi_workspace = multi_workspace.clone(); menu = menu.entry_with_end_slot_on_hover( name.clone(), None, |_, _| {}, IconName::Close, "Remove Folder".into(), - remove_handler, + move |_window, cx| { + multi_workspace + .update(cx, |multi_workspace, cx| { + multi_workspace.remove_folder_from_project_group( + &project_group_key, + &path, + cx, + ); + }) + .ok(); + }, ); 
} - let workspace_for_add = workspace.clone(); - let multi_workspace_for_add = multi_workspace.clone(); let menu = menu.separator().entry( "Add Folder to Project", Some(Box::new(AddFolderToProject)), - move |window, cx| { - if let Some(mw) = multi_workspace_for_add.upgrade() { - mw.update(cx, |mw, cx| { - mw.activate(workspace_for_add.clone(), window, cx); - }); + { + let project_group_key = project_group_key.clone(); + let multi_workspace = multi_workspace.clone(); + move |window, cx| { + multi_workspace + .update(cx, |multi_workspace, cx| { + multi_workspace.prompt_to_add_folders_to_project_group( + &project_group_key, + window, + cx, + ); + }) + .ok(); } - workspace_for_add.update(cx, |workspace, cx| { - workspace.add_folder_to_project(&AddFolderToProject, window, cx); - }); }, ); - let workspace_count = multi_workspace + let group_count = multi_workspace .upgrade() - .map_or(0, |mw| mw.read(cx).workspaces().len()); - let menu = if workspace_count > 1 { - let workspace_for_move = workspace.clone(); - let multi_workspace_for_move = multi_workspace.clone(); + .map_or(0, |mw| mw.read(cx).project_group_keys().count()); + let menu = if group_count > 1 { + let project_group_key = project_group_key.clone(); + let multi_workspace = multi_workspace.clone(); menu.entry( "Move to New Window", Some(Box::new( zed_actions::agents_sidebar::MoveWorkspaceToNewWindow, )), move |window, cx| { - if let Some(mw) = multi_workspace_for_move.upgrade() { - mw.update(cx, |multi_workspace, cx| { - multi_workspace.move_workspace_to_new_window( - &workspace_for_move, + multi_workspace + .update(cx, |multi_workspace, cx| { + multi_workspace.move_project_group_to_new_window( + &project_group_key, window, cx, ); - }); - } + }) + .ok(); }, ) } else { menu }; - let workspace_for_remove = workspace_for_remove.clone(); - let multi_workspace_for_remove = multi_workspace.clone(); + let project_group_key = project_group_key.clone(); + let multi_workspace = multi_workspace.clone(); menu.separator() 
.entry("Remove Project", None, move |window, cx| { - if let Some(mw) = multi_workspace_for_remove.upgrade() { - let ws = workspace_for_remove.clone(); - mw.update(cx, |multi_workspace, cx| { - multi_workspace.remove(&ws, window, cx); - }); - } + multi_workspace + .update(cx, |multi_workspace, cx| { + multi_workspace.remove_project_group( + &project_group_key, + window, + cx, + ); + }) + .ok(); }) }); @@ -1652,8 +1669,7 @@ impl Sidebar { IconName::Ellipsis, ) .selected_style(ButtonStyle::Tinted(TintColor::Accent)) - .icon_size(IconSize::Small) - .icon_color(Color::Muted), + .icon_size(IconSize::Small), ) .anchor(gpui::Corner::TopRight) .offset(gpui::Point { @@ -1684,9 +1700,8 @@ impl Sidebar { } let ListEntry::ProjectHeader { - path_list, + key, label, - workspace, highlight_positions, has_running_threads, waiting_thread_count, @@ -1702,9 +1717,8 @@ impl Sidebar { let header_element = self.render_project_header( header_idx, true, - &path_list, + key, &label, - workspace, &highlight_positions, *has_running_threads, *waiting_thread_count, @@ -1767,7 +1781,11 @@ impl Sidebar { dispatch_context.add("ThreadsSidebar"); dispatch_context.add("menu"); - let identifier = if self.filter_editor.focus_handle(cx).is_focused(window) { + let is_archived_search_focused = matches!(&self.view, SidebarView::Archive(archive) if archive.read(cx).is_filter_editor_focused(window, cx)); + + let identifier = if self.filter_editor.focus_handle(cx).is_focused(window) + || is_archived_search_focused + { "searching" } else { "not_searching" @@ -1925,8 +1943,8 @@ impl Sidebar { }; match entry { - ListEntry::ProjectHeader { path_list, .. } => { - let path_list = path_list.clone(); + ListEntry::ProjectHeader { key, .. } => { + let path_list = key.path_list().clone(); self.toggle_collapse(&path_list, window, cx); } ListEntry::Thread(thread) => { @@ -1947,11 +1965,11 @@ impl Sidebar { } } ListEntry::ViewMore { - path_list, + key, is_fully_expanded, .. 
} => { - let path_list = path_list.clone(); + let path_list = key.path_list().clone(); if *is_fully_expanded { self.expanded_groups.remove(&path_list); } else { @@ -1961,9 +1979,16 @@ impl Sidebar { self.serialize(cx); self.update_entries(cx); } - ListEntry::NewThread { workspace, .. } => { - let workspace = workspace.clone(); - self.create_new_thread(&workspace, window, cx); + ListEntry::DraftThread { .. } => { + // Already active — nothing to do. + } + ListEntry::NewThread { key, .. } => { + let path_list = key.path_list().clone(); + if let Some(workspace) = self.workspace_for_group(&path_list, cx) { + self.create_new_thread(&workspace, window, cx); + } else { + self.open_workspace_for_group(&path_list, window, cx); + } } } } @@ -1980,7 +2005,6 @@ impl Sidebar { let workspace = window.read(cx).ok().and_then(|multi_workspace| { multi_workspace .workspaces() - .iter() .find(|workspace| predicate(workspace, cx)) .cloned() })?; @@ -1997,7 +2021,6 @@ impl Sidebar { multi_workspace .read(cx) .workspaces() - .iter() .find(|workspace| predicate(workspace, cx)) .cloned() }) @@ -2133,16 +2156,12 @@ impl Sidebar { return; }; - let paths: Vec = - path_list.paths().iter().map(|p| p.to_path_buf()).collect(); - - let open_task = multi_workspace.update(cx, |mw, cx| { - mw.open_project(paths, workspace::OpenMode::Activate, window, cx) + let open_task = multi_workspace.update(cx, |this, cx| { + this.find_or_create_local_workspace(path_list, window, cx) }); cx.spawn_in(window, async move |this, cx| { let workspace = open_task.await?; - this.update_in(cx, |this, window, cx| { this.activate_thread(metadata, &workspace, window, cx); })?; @@ -2194,12 +2213,10 @@ impl Sidebar { return; } - let active_workspace = self.multi_workspace.upgrade().and_then(|w| { - w.read(cx) - .workspaces() - .get(w.read(cx).active_workspace_index()) - .cloned() - }); + let active_workspace = self + .multi_workspace + .upgrade() + .map(|w| w.read(cx).workspace().clone()); if let Some(workspace) = 
active_workspace { self.activate_thread_locally(&metadata, &workspace, window, cx); @@ -2215,9 +2232,9 @@ impl Sidebar { let Some(ix) = self.selection else { return }; match self.contents.entries.get(ix) { - Some(ListEntry::ProjectHeader { path_list, .. }) => { - if self.collapsed_groups.contains(path_list) { - let path_list = path_list.clone(); + Some(ListEntry::ProjectHeader { key, .. }) => { + if self.collapsed_groups.contains(key.path_list()) { + let path_list = key.path_list().clone(); self.collapsed_groups.remove(&path_list); self.update_entries(cx); } else if ix + 1 < self.contents.entries.len() { @@ -2239,23 +2256,23 @@ impl Sidebar { let Some(ix) = self.selection else { return }; match self.contents.entries.get(ix) { - Some(ListEntry::ProjectHeader { path_list, .. }) => { - if !self.collapsed_groups.contains(path_list) { - let path_list = path_list.clone(); - self.collapsed_groups.insert(path_list); + Some(ListEntry::ProjectHeader { key, .. }) => { + if !self.collapsed_groups.contains(key.path_list()) { + self.collapsed_groups.insert(key.path_list().clone()); self.update_entries(cx); } } Some( - ListEntry::Thread(_) | ListEntry::ViewMore { .. } | ListEntry::NewThread { .. }, + ListEntry::Thread(_) + | ListEntry::ViewMore { .. } + | ListEntry::NewThread { .. } + | ListEntry::DraftThread { .. }, ) => { for i in (0..ix).rev() { - if let Some(ListEntry::ProjectHeader { path_list, .. }) = - self.contents.entries.get(i) + if let Some(ListEntry::ProjectHeader { key, .. }) = self.contents.entries.get(i) { - let path_list = path_list.clone(); self.selection = Some(i); - self.collapsed_groups.insert(path_list); + self.collapsed_groups.insert(key.path_list().clone()); self.update_entries(cx); break; } @@ -2277,7 +2294,10 @@ impl Sidebar { let header_ix = match self.contents.entries.get(ix) { Some(ListEntry::ProjectHeader { .. }) => Some(ix), Some( - ListEntry::Thread(_) | ListEntry::ViewMore { .. } | ListEntry::NewThread { .. 
}, + ListEntry::Thread(_) + | ListEntry::ViewMore { .. } + | ListEntry::NewThread { .. } + | ListEntry::DraftThread { .. }, ) => (0..ix).rev().find(|&i| { matches!( self.contents.entries.get(i), @@ -2288,15 +2308,14 @@ impl Sidebar { }; if let Some(header_ix) = header_ix { - if let Some(ListEntry::ProjectHeader { path_list, .. }) = - self.contents.entries.get(header_ix) + if let Some(ListEntry::ProjectHeader { key, .. }) = self.contents.entries.get(header_ix) { - let path_list = path_list.clone(); - if self.collapsed_groups.contains(&path_list) { - self.collapsed_groups.remove(&path_list); + let path_list = key.path_list(); + if self.collapsed_groups.contains(path_list) { + self.collapsed_groups.remove(path_list); } else { self.selection = Some(header_ix); - self.collapsed_groups.insert(path_list); + self.collapsed_groups.insert(path_list.clone()); } self.update_entries(cx); } @@ -2310,8 +2329,8 @@ impl Sidebar { cx: &mut Context, ) { for entry in &self.contents.entries { - if let ListEntry::ProjectHeader { path_list, .. } = entry { - self.collapsed_groups.insert(path_list.clone()); + if let ListEntry::ProjectHeader { key, .. } = entry { + self.collapsed_groups.insert(key.path_list().clone()); } } self.update_entries(cx); @@ -2332,7 +2351,7 @@ impl Sidebar { return; }; - let workspaces = multi_workspace.read(cx).workspaces().to_vec(); + let workspaces: Vec<_> = multi_workspace.read(cx).workspaces().cloned().collect(); for workspace in workspaces { if let Some(agent_panel) = workspace.read(cx).panel::(cx) { let cancelled = @@ -2366,17 +2385,18 @@ impl Sidebar { }); // Find the workspace that owns this thread's project group by - // walking backwards to the nearest ProjectHeader. We must use - // *this* workspace (not the active workspace) because the user - // might be archiving a thread in a non-active group. + // walking backwards to the nearest ProjectHeader and looking up + // an open workspace for that group's path_list. 
let group_workspace = current_pos.and_then(|pos| { - self.contents.entries[..pos] - .iter() - .rev() - .find_map(|e| match e { - ListEntry::ProjectHeader { workspace, .. } => Some(workspace.clone()), - _ => None, - }) + let path_list = + self.contents.entries[..pos] + .iter() + .rev() + .find_map(|e| match e { + ListEntry::ProjectHeader { key, .. } => Some(key.path_list()), + _ => None, + })?; + self.workspace_for_group(path_list, cx) }); let next_thread = current_pos.and_then(|pos| { @@ -2491,28 +2511,26 @@ impl Sidebar { .insert(session_id.clone(), Utc::now()); } - fn mru_threads_for_switcher(&self, _cx: &App) -> Vec { + fn mru_threads_for_switcher(&self, cx: &App) -> Vec { let mut current_header_label: Option = None; - let mut current_header_workspace: Option> = None; + let mut current_header_path_list: Option = None; let mut entries: Vec = self .contents .entries .iter() .filter_map(|entry| match entry { - ListEntry::ProjectHeader { - label, workspace, .. - } => { + ListEntry::ProjectHeader { label, key, .. 
} => { current_header_label = Some(label.clone()); - current_header_workspace = Some(workspace.clone()); + current_header_path_list = Some(key.path_list().clone()); None } ListEntry::Thread(thread) => { let workspace = match &thread.workspace { - ThreadEntryWorkspace::Open(workspace) => workspace.clone(), - ThreadEntryWorkspace::Closed(_) => { - current_header_workspace.as_ref()?.clone() - } - }; + ThreadEntryWorkspace::Open(workspace) => Some(workspace.clone()), + ThreadEntryWorkspace::Closed(_) => current_header_path_list + .as_ref() + .and_then(|pl| self.workspace_for_group(pl, cx)), + }?; let notified = self .contents .is_thread_notified(&thread.metadata.session_id); @@ -2787,7 +2805,7 @@ impl Sidebar { let color = cx.theme().colors(); let sidebar_bg = color .title_bar_background - .blend(color.panel_background.opacity(0.32)); + .blend(color.panel_background.opacity(0.25)); let timestamp = format_history_entry_timestamp( self.thread_last_message_sent_or_queued @@ -2926,7 +2944,6 @@ impl Sidebar { .map(|mw| { mw.read(cx) .workspaces() - .iter() .filter_map(|ws| ws.read(cx).database_id()) .collect() }) @@ -3019,7 +3036,9 @@ impl Sidebar { .rev() .find(|&&header_ix| header_ix <= selected_ix) .and_then(|&header_ix| match &self.contents.entries[header_ix] { - ListEntry::ProjectHeader { workspace, .. } => Some(workspace.clone()), + ListEntry::ProjectHeader { key, .. 
} => { + self.workspace_for_group(key.path_list(), cx) + } _ => None, }) } else { @@ -3062,11 +3081,9 @@ impl Sidebar { }); } - fn render_new_thread( + fn render_draft_thread( &self, ix: usize, - _path_list: &PathList, - workspace: &Entity, is_active: bool, worktrees: &[WorktreeInfo], is_selected: bool, @@ -3074,12 +3091,48 @@ impl Sidebar { ) -> AnyElement { let label: SharedString = if is_active { self.active_draft_text(cx) - .unwrap_or_else(|| DEFAULT_THREAD_TITLE.into()) + .unwrap_or_else(|| "Untitled Thread".into()) } else { - DEFAULT_THREAD_TITLE.into() + "Untitled Thread".into() }; - let workspace = workspace.clone(); + let id = SharedString::from(format!("draft-thread-btn-{}", ix)); + + let thread_item = ThreadItem::new(id, label) + .icon(IconName::Plus) + .icon_color(Color::Custom(cx.theme().colors().icon_muted.opacity(0.8))) + .worktrees( + worktrees + .iter() + .map(|wt| ThreadItemWorktreeInfo { + name: wt.name.clone(), + full_path: wt.full_path.clone(), + highlight_positions: wt.highlight_positions.clone(), + }) + .collect(), + ) + .selected(true) + .focused(is_selected); + + div() + .on_mouse_down(gpui::MouseButton::Left, |_, _, cx| { + cx.stop_propagation(); + }) + .child(thread_item) + .into_any_element() + } + + fn render_new_thread( + &self, + ix: usize, + key: &ProjectGroupKey, + worktrees: &[WorktreeInfo], + is_selected: bool, + cx: &mut Context, + ) -> AnyElement { + let label: SharedString = DEFAULT_THREAD_TITLE.into(); + let path_list = key.path_list().clone(); + let id = SharedString::from(format!("new-thread-btn-{}", ix)); let thread_item = ThreadItem::new(id, label) @@ -3095,25 +3148,18 @@ impl Sidebar { }) .collect(), ) - .selected(is_active) + .selected(false) .focused(is_selected) - .when(!is_active, |this| { - this.on_click(cx.listener(move |this, _, window, cx| { - this.selection = None; + .on_click(cx.listener(move |this, _, window, cx| { + this.selection = None; + if let Some(workspace) = this.workspace_for_group(&path_list, cx) { 
this.create_new_thread(&workspace, window, cx); - })) - }); + } else { + this.open_workspace_for_group(&path_list, window, cx); + } + })); - if is_active { - div() - .on_mouse_down(gpui::MouseButton::Left, |_, _, cx| { - cx.stop_propagation(); - }) - .child(thread_item) - .into_any_element() - } else { - thread_item.into_any_element() - } + thread_item.into_any_element() } fn render_no_results(&self, cx: &mut Context) -> impl IntoElement { @@ -3365,12 +3411,9 @@ impl Sidebar { } fn active_workspace(&self, cx: &App) -> Option> { - self.multi_workspace.upgrade().and_then(|w| { - w.read(cx) - .workspaces() - .get(w.read(cx).active_workspace_index()) - .cloned() - }) + self.multi_workspace + .upgrade() + .map(|w| w.read(cx).workspace().clone()) } fn show_thread_import_modal(&mut self, window: &mut Window, cx: &mut Context) { @@ -3478,12 +3521,11 @@ impl Sidebar { } fn show_archive(&mut self, window: &mut Window, cx: &mut Context) { - let Some(active_workspace) = self.multi_workspace.upgrade().and_then(|w| { - w.read(cx) - .workspaces() - .get(w.read(cx).active_workspace_index()) - .cloned() - }) else { + let Some(active_workspace) = self + .multi_workspace + .upgrade() + .map(|w| w.read(cx).workspace().clone()) + else { return; }; let Some(agent_panel) = active_workspace.read(cx).panel::(cx) else { @@ -3644,7 +3686,7 @@ impl Render for Sidebar { let color = cx.theme().colors(); let bg = color .title_bar_background - .blend(color.panel_background.opacity(0.32)); + .blend(color.panel_background.opacity(0.25)); let no_open_projects = !self.contents.has_open_projects; let no_search_results = self.contents.entries.is_empty(); @@ -3785,21 +3827,27 @@ pub fn dump_workspace_info( let multi_workspace = workspace.multi_workspace().and_then(|weak| weak.upgrade()); let workspaces: Vec> = match &multi_workspace { - Some(mw) => mw.read(cx).workspaces().to_vec(), + Some(mw) => mw.read(cx).workspaces().cloned().collect(), None => vec![this_entity.clone()], }; - let active_index = 
multi_workspace + let active_workspace = multi_workspace .as_ref() - .map(|mw| mw.read(cx).active_workspace_index()); + .map(|mw| mw.read(cx).workspace().clone()); writeln!(output, "MultiWorkspace: {} workspace(s)", workspaces.len()).ok(); - if let Some(index) = active_index { - writeln!(output, "Active workspace index: {index}").ok(); + + if let Some(mw) = &multi_workspace { + let keys: Vec<_> = mw.read(cx).project_group_keys().cloned().collect(); + writeln!(output, "Project group keys ({}):", keys.len()).ok(); + for key in keys { + writeln!(output, " - {key:?}").ok(); + } } + writeln!(output).ok(); for (index, ws) in workspaces.iter().enumerate() { - let is_active = active_index == Some(index); + let is_active = active_workspace.as_ref() == Some(ws); writeln!( output, "--- Workspace {index}{} ---", diff --git a/crates/sidebar/src/sidebar_tests.rs b/crates/sidebar/src/sidebar_tests.rs index b9bd873d369a44d3e09db9771383c111ead2ccb6..60881acfe9461f7897d6013831970444b7a65544 100644 --- a/crates/sidebar/src/sidebar_tests.rs +++ b/crates/sidebar/src/sidebar_tests.rs @@ -12,7 +12,10 @@ use gpui::TestAppContext; use pretty_assertions::assert_eq; use project::AgentId; use settings::SettingsStore; -use std::{path::PathBuf, sync::Arc}; +use std::{ + path::{Path, PathBuf}, + sync::Arc, +}; use util::path_list::PathList; fn init_test(cx: &mut TestAppContext) { @@ -74,6 +77,18 @@ async fn init_test_project( fn setup_sidebar( multi_workspace: &Entity, cx: &mut gpui::VisualTestContext, +) -> Entity { + let sidebar = setup_sidebar_closed(multi_workspace, cx); + multi_workspace.update_in(cx, |mw, window, cx| { + mw.toggle_sidebar(window, cx); + }); + cx.run_until_parked(); + sidebar +} + +fn setup_sidebar_closed( + multi_workspace: &Entity, + cx: &mut gpui::VisualTestContext, ) -> Entity { let multi_workspace = multi_workspace.clone(); let sidebar = @@ -85,14 +100,18 @@ fn setup_sidebar( sidebar } -async fn save_n_test_threads(count: u32, path_list: &PathList, cx: &mut 
gpui::VisualTestContext) { +async fn save_n_test_threads( + count: u32, + project: &Entity, + cx: &mut gpui::VisualTestContext, +) { for i in 0..count { save_thread_metadata( acp::SessionId::new(Arc::from(format!("thread-{}", i))), format!("Thread {}", i + 1).into(), chrono::TimeZone::with_ymd_and_hms(&Utc, 2024, 1, 1, 0, 0, i).unwrap(), None, - path_list.clone(), + project, cx, ) } @@ -101,7 +120,7 @@ async fn save_n_test_threads(count: u32, path_list: &PathList, cx: &mut gpui::Vi async fn save_test_thread_metadata( session_id: &acp::SessionId, - path_list: PathList, + project: &Entity, cx: &mut TestAppContext, ) { save_thread_metadata( @@ -109,7 +128,7 @@ async fn save_test_thread_metadata( "Test".into(), chrono::TimeZone::with_ymd_and_hms(&Utc, 2024, 1, 1, 0, 0, 0).unwrap(), None, - path_list, + project, cx, ) } @@ -117,7 +136,7 @@ async fn save_test_thread_metadata( async fn save_named_thread_metadata( session_id: &str, title: &str, - path_list: &PathList, + project: &Entity, cx: &mut gpui::VisualTestContext, ) { save_thread_metadata( @@ -125,7 +144,7 @@ async fn save_named_thread_metadata( SharedString::from(title.to_string()), chrono::TimeZone::with_ymd_and_hms(&Utc, 2024, 1, 1, 0, 0, 0).unwrap(), None, - path_list.clone(), + project, cx, ); cx.run_until_parked(); @@ -136,34 +155,36 @@ fn save_thread_metadata( title: SharedString, updated_at: DateTime, created_at: Option>, - path_list: PathList, + project: &Entity, cx: &mut TestAppContext, ) { - let metadata = ThreadMetadata { - session_id, - agent_id: agent::ZED_AGENT_ID.clone(), - title, - updated_at, - created_at, - folder_paths: path_list, - archived: false, - }; cx.update(|cx| { - ThreadMetadataStore::global(cx).update(cx, |store, cx| store.save_manually(metadata, cx)) + let (folder_paths, main_worktree_paths) = { + let project_ref = project.read(cx); + let paths: Vec> = project_ref + .visible_worktrees(cx) + .map(|worktree| worktree.read(cx).abs_path()) + .collect(); + let folder_paths = 
PathList::new(&paths); + let main_worktree_paths = project_ref.project_group_key(cx).path_list().clone(); + (folder_paths, main_worktree_paths) + }; + let metadata = ThreadMetadata { + session_id, + agent_id: agent::ZED_AGENT_ID.clone(), + title, + updated_at, + created_at, + folder_paths, + main_worktree_paths, + archived: false, + }; + ThreadMetadataStore::global(cx).update(cx, |store, cx| store.save_manually(metadata, cx)); }); cx.run_until_parked(); } -fn open_and_focus_sidebar(sidebar: &Entity, cx: &mut gpui::VisualTestContext) { - let multi_workspace = sidebar.read_with(cx, |s, _| s.multi_workspace.upgrade()); - if let Some(multi_workspace) = multi_workspace { - multi_workspace.update_in(cx, |mw, window, cx| { - if !mw.sidebar_open() { - mw.toggle_sidebar(window, cx); - } - }); - } - cx.run_until_parked(); +fn focus_sidebar(sidebar: &Entity, cx: &mut gpui::VisualTestContext) { sidebar.update_in(cx, |_, window, cx| { cx.focus_self(window); }); @@ -189,11 +210,11 @@ fn visible_entries_as_strings( match entry { ListEntry::ProjectHeader { label, - path_list, + key, highlight_positions: _, .. } => { - let icon = if sidebar.collapsed_groups.contains(path_list) { + let icon = if sidebar.collapsed_groups.contains(key.path_list()) { ">" } else { "v" @@ -244,6 +265,22 @@ fn visible_entries_as_strings( format!(" + View More{}", selected) } } + ListEntry::DraftThread { worktrees, .. } => { + let worktree = if worktrees.is_empty() { + String::new() + } else { + let mut seen = Vec::new(); + let mut chips = Vec::new(); + for wt in worktrees { + if !seen.contains(&wt.name) { + seen.push(wt.name.clone()); + chips.push(format!("{{{}}}", wt.name)); + } + } + format!(" {}", chips.join(", ")) + }; + format!(" [~ Draft{}]{}", worktree, selected) + } ListEntry::NewThread { worktrees, .. 
} => { let worktree = if worktrees.is_empty() { String::new() @@ -270,11 +307,14 @@ fn visible_entries_as_strings( async fn test_serialization_round_trip(cx: &mut TestAppContext) { let project = init_test_project("/my-project", cx).await; let (multi_workspace, cx) = - cx.add_window_view(|window, cx| MultiWorkspace::test_new(project, window, cx)); + cx.add_window_view(|window, cx| MultiWorkspace::test_new(project.clone(), window, cx)); let sidebar = setup_sidebar(&multi_workspace, cx); - let path_list = PathList::new(&[std::path::PathBuf::from("/my-project")]); - save_n_test_threads(3, &path_list, cx).await; + save_n_test_threads(3, &project, cx).await; + + let path_list = project.read_with(cx, |project, cx| { + project.project_group_key(cx).path_list().clone() + }); // Set a custom width, collapse the group, and expand "View More". sidebar.update_in(cx, |sidebar, window, cx| { @@ -433,17 +473,15 @@ async fn test_single_workspace_no_threads(cx: &mut TestAppContext) { async fn test_single_workspace_with_saved_threads(cx: &mut TestAppContext) { let project = init_test_project("/my-project", cx).await; let (multi_workspace, cx) = - cx.add_window_view(|window, cx| MultiWorkspace::test_new(project, window, cx)); + cx.add_window_view(|window, cx| MultiWorkspace::test_new(project.clone(), window, cx)); let sidebar = setup_sidebar(&multi_workspace, cx); - let path_list = PathList::new(&[std::path::PathBuf::from("/my-project")]); - save_thread_metadata( acp::SessionId::new(Arc::from("thread-1")), "Fix crash in project panel".into(), chrono::TimeZone::with_ymd_and_hms(&Utc, 2024, 1, 3, 0, 0, 0).unwrap(), None, - path_list.clone(), + &project, cx, ); @@ -452,7 +490,7 @@ async fn test_single_workspace_with_saved_threads(cx: &mut TestAppContext) { "Add inline diff view".into(), chrono::TimeZone::with_ymd_and_hms(&Utc, 2024, 1, 2, 0, 0, 0).unwrap(), None, - path_list, + &project, cx, ); cx.run_until_parked(); @@ -474,18 +512,16 @@ async fn 
test_single_workspace_with_saved_threads(cx: &mut TestAppContext) { async fn test_workspace_lifecycle(cx: &mut TestAppContext) { let project = init_test_project("/project-a", cx).await; let (multi_workspace, cx) = - cx.add_window_view(|window, cx| MultiWorkspace::test_new(project, window, cx)); + cx.add_window_view(|window, cx| MultiWorkspace::test_new(project.clone(), window, cx)); let sidebar = setup_sidebar(&multi_workspace, cx); // Single workspace with a thread - let path_list = PathList::new(&[std::path::PathBuf::from("/project-a")]); - save_thread_metadata( acp::SessionId::new(Arc::from("thread-a1")), "Thread A1".into(), chrono::TimeZone::with_ymd_and_hms(&Utc, 2024, 1, 1, 0, 0, 0).unwrap(), None, - path_list, + &project, cx, ); cx.run_until_parked(); @@ -511,7 +547,7 @@ async fn test_workspace_lifecycle(cx: &mut TestAppContext) { // Remove the second workspace multi_workspace.update_in(cx, |mw, window, cx| { - let workspace = mw.workspaces()[1].clone(); + let workspace = mw.workspaces().nth(1).cloned().unwrap(); mw.remove(&workspace, window, cx); }); cx.run_until_parked(); @@ -526,11 +562,10 @@ async fn test_workspace_lifecycle(cx: &mut TestAppContext) { async fn test_view_more_pagination(cx: &mut TestAppContext) { let project = init_test_project("/my-project", cx).await; let (multi_workspace, cx) = - cx.add_window_view(|window, cx| MultiWorkspace::test_new(project, window, cx)); + cx.add_window_view(|window, cx| MultiWorkspace::test_new(project.clone(), window, cx)); let sidebar = setup_sidebar(&multi_workspace, cx); - let path_list = PathList::new(&[std::path::PathBuf::from("/my-project")]); - save_n_test_threads(12, &path_list, cx).await; + save_n_test_threads(12, &project, cx).await; multi_workspace.update_in(cx, |_, _window, cx| cx.notify()); cx.run_until_parked(); @@ -553,12 +588,15 @@ async fn test_view_more_pagination(cx: &mut TestAppContext) { async fn test_view_more_batched_expansion(cx: &mut TestAppContext) { let project = 
init_test_project("/my-project", cx).await; let (multi_workspace, cx) = - cx.add_window_view(|window, cx| MultiWorkspace::test_new(project, window, cx)); + cx.add_window_view(|window, cx| MultiWorkspace::test_new(project.clone(), window, cx)); let sidebar = setup_sidebar(&multi_workspace, cx); - let path_list = PathList::new(&[std::path::PathBuf::from("/my-project")]); // Create 17 threads: initially shows 5, then 10, then 15, then all 17 with Collapse - save_n_test_threads(17, &path_list, cx).await; + save_n_test_threads(17, &project, cx).await; + + let path_list = project.read_with(cx, |project, cx| { + project.project_group_key(cx).path_list().clone() + }); multi_workspace.update_in(cx, |_, _window, cx| cx.notify()); cx.run_until_parked(); @@ -569,7 +607,7 @@ async fn test_view_more_batched_expansion(cx: &mut TestAppContext) { assert!(entries.iter().any(|e| e.contains("View More"))); // Focus and navigate to View More, then confirm to expand by one batch - open_and_focus_sidebar(&sidebar, cx); + focus_sidebar(&sidebar, cx); for _ in 0..7 { cx.dispatch_action(SelectNext); } @@ -625,11 +663,14 @@ async fn test_view_more_batched_expansion(cx: &mut TestAppContext) { async fn test_collapse_and_expand_group(cx: &mut TestAppContext) { let project = init_test_project("/my-project", cx).await; let (multi_workspace, cx) = - cx.add_window_view(|window, cx| MultiWorkspace::test_new(project, window, cx)); + cx.add_window_view(|window, cx| MultiWorkspace::test_new(project.clone(), window, cx)); let sidebar = setup_sidebar(&multi_workspace, cx); - let path_list = PathList::new(&[std::path::PathBuf::from("/my-project")]); - save_n_test_threads(1, &path_list, cx).await; + save_n_test_threads(1, &project, cx).await; + + let path_list = project.read_with(cx, |project, cx| { + project.project_group_key(cx).path_list().clone() + }); multi_workspace.update_in(cx, |_, _window, cx| cx.notify()); cx.run_until_parked(); @@ -681,9 +722,8 @@ async fn test_visible_entries_as_strings(cx: 
&mut TestAppContext) { s.contents.entries = vec![ // Expanded project header ListEntry::ProjectHeader { - path_list: expanded_path.clone(), + key: project::ProjectGroupKey::new(None, expanded_path.clone()), label: "expanded-project".into(), - workspace: workspace.clone(), highlight_positions: Vec::new(), has_running_threads: false, waiting_thread_count: 0, @@ -694,6 +734,7 @@ async fn test_visible_entries_as_strings(cx: &mut TestAppContext) { session_id: acp::SessionId::new(Arc::from("t-1")), agent_id: AgentId::new("zed-agent"), folder_paths: PathList::default(), + main_worktree_paths: PathList::default(), title: "Completed thread".into(), updated_at: Utc::now(), created_at: Some(Utc::now()), @@ -716,6 +757,7 @@ async fn test_visible_entries_as_strings(cx: &mut TestAppContext) { session_id: acp::SessionId::new(Arc::from("t-2")), agent_id: AgentId::new("zed-agent"), folder_paths: PathList::default(), + main_worktree_paths: PathList::default(), title: "Running thread".into(), updated_at: Utc::now(), created_at: Some(Utc::now()), @@ -738,6 +780,7 @@ async fn test_visible_entries_as_strings(cx: &mut TestAppContext) { session_id: acp::SessionId::new(Arc::from("t-3")), agent_id: AgentId::new("zed-agent"), folder_paths: PathList::default(), + main_worktree_paths: PathList::default(), title: "Error thread".into(), updated_at: Utc::now(), created_at: Some(Utc::now()), @@ -760,6 +803,7 @@ async fn test_visible_entries_as_strings(cx: &mut TestAppContext) { session_id: acp::SessionId::new(Arc::from("t-4")), agent_id: AgentId::new("zed-agent"), folder_paths: PathList::default(), + main_worktree_paths: PathList::default(), title: "Waiting thread".into(), updated_at: Utc::now(), created_at: Some(Utc::now()), @@ -782,6 +826,7 @@ async fn test_visible_entries_as_strings(cx: &mut TestAppContext) { session_id: acp::SessionId::new(Arc::from("t-5")), agent_id: AgentId::new("zed-agent"), folder_paths: PathList::default(), + main_worktree_paths: PathList::default(), title: "Notified 
thread".into(), updated_at: Utc::now(), created_at: Some(Utc::now()), @@ -800,14 +845,13 @@ async fn test_visible_entries_as_strings(cx: &mut TestAppContext) { }), // View More entry ListEntry::ViewMore { - path_list: expanded_path.clone(), + key: project::ProjectGroupKey::new(None, expanded_path.clone()), is_fully_expanded: false, }, // Collapsed project header ListEntry::ProjectHeader { - path_list: collapsed_path.clone(), + key: project::ProjectGroupKey::new(None, collapsed_path.clone()), label: "collapsed-project".into(), - workspace: workspace.clone(), highlight_positions: Vec::new(), has_running_threads: false, waiting_thread_count: 0, @@ -863,11 +907,10 @@ async fn test_visible_entries_as_strings(cx: &mut TestAppContext) { async fn test_keyboard_select_next_and_previous(cx: &mut TestAppContext) { let project = init_test_project("/my-project", cx).await; let (multi_workspace, cx) = - cx.add_window_view(|window, cx| MultiWorkspace::test_new(project, window, cx)); + cx.add_window_view(|window, cx| MultiWorkspace::test_new(project.clone(), window, cx)); let sidebar = setup_sidebar(&multi_workspace, cx); - let path_list = PathList::new(&[std::path::PathBuf::from("/my-project")]); - save_n_test_threads(3, &path_list, cx).await; + save_n_test_threads(3, &project, cx).await; multi_workspace.update_in(cx, |_, _window, cx| cx.notify()); cx.run_until_parked(); @@ -875,7 +918,7 @@ async fn test_keyboard_select_next_and_previous(cx: &mut TestAppContext) { // Entries: [header, thread3, thread2, thread1] // Focusing the sidebar does not set a selection; select_next/select_previous // handle None gracefully by starting from the first or last entry. 
- open_and_focus_sidebar(&sidebar, cx); + focus_sidebar(&sidebar, cx); assert_eq!(sidebar.read_with(cx, |s, _| s.selection), None); // First SelectNext from None starts at index 0 @@ -923,15 +966,14 @@ async fn test_keyboard_select_next_and_previous(cx: &mut TestAppContext) { async fn test_keyboard_select_first_and_last(cx: &mut TestAppContext) { let project = init_test_project("/my-project", cx).await; let (multi_workspace, cx) = - cx.add_window_view(|window, cx| MultiWorkspace::test_new(project, window, cx)); + cx.add_window_view(|window, cx| MultiWorkspace::test_new(project.clone(), window, cx)); let sidebar = setup_sidebar(&multi_workspace, cx); - let path_list = PathList::new(&[std::path::PathBuf::from("/my-project")]); - save_n_test_threads(3, &path_list, cx).await; + save_n_test_threads(3, &project, cx).await; multi_workspace.update_in(cx, |_, _window, cx| cx.notify()); cx.run_until_parked(); - open_and_focus_sidebar(&sidebar, cx); + focus_sidebar(&sidebar, cx); // SelectLast jumps to the end cx.dispatch_action(SelectLast); @@ -954,7 +996,7 @@ async fn test_keyboard_focus_in_does_not_set_selection(cx: &mut TestAppContext) // Open the sidebar so it's rendered, then focus it to trigger focus_in. // focus_in no longer sets a default selection. 
- open_and_focus_sidebar(&sidebar, cx); + focus_sidebar(&sidebar, cx); assert_eq!(sidebar.read_with(cx, |s, _| s.selection), None); // Manually set a selection, blur, then refocus — selection should be preserved @@ -978,11 +1020,10 @@ async fn test_keyboard_focus_in_does_not_set_selection(cx: &mut TestAppContext) async fn test_keyboard_confirm_on_project_header_toggles_collapse(cx: &mut TestAppContext) { let project = init_test_project("/my-project", cx).await; let (multi_workspace, cx) = - cx.add_window_view(|window, cx| MultiWorkspace::test_new(project, window, cx)); + cx.add_window_view(|window, cx| MultiWorkspace::test_new(project.clone(), window, cx)); let sidebar = setup_sidebar(&multi_workspace, cx); - let path_list = PathList::new(&[std::path::PathBuf::from("/my-project")]); - save_n_test_threads(1, &path_list, cx).await; + save_n_test_threads(1, &project, cx).await; multi_workspace.update_in(cx, |_, _window, cx| cx.notify()); cx.run_until_parked(); @@ -992,7 +1033,7 @@ async fn test_keyboard_confirm_on_project_header_toggles_collapse(cx: &mut TestA ); // Focus the sidebar and select the header (index 0) - open_and_focus_sidebar(&sidebar, cx); + focus_sidebar(&sidebar, cx); sidebar.update_in(cx, |sidebar, _window, _cx| { sidebar.selection = Some(0); }); @@ -1020,11 +1061,10 @@ async fn test_keyboard_confirm_on_project_header_toggles_collapse(cx: &mut TestA async fn test_keyboard_confirm_on_view_more_expands(cx: &mut TestAppContext) { let project = init_test_project("/my-project", cx).await; let (multi_workspace, cx) = - cx.add_window_view(|window, cx| MultiWorkspace::test_new(project, window, cx)); + cx.add_window_view(|window, cx| MultiWorkspace::test_new(project.clone(), window, cx)); let sidebar = setup_sidebar(&multi_workspace, cx); - let path_list = PathList::new(&[std::path::PathBuf::from("/my-project")]); - save_n_test_threads(8, &path_list, cx).await; + save_n_test_threads(8, &project, cx).await; multi_workspace.update_in(cx, |_, _window, cx| 
cx.notify()); cx.run_until_parked(); @@ -1034,7 +1074,7 @@ async fn test_keyboard_confirm_on_view_more_expands(cx: &mut TestAppContext) { assert!(entries.iter().any(|e| e.contains("View More"))); // Focus sidebar (selection starts at None), then navigate down to the "View More" entry (index 6) - open_and_focus_sidebar(&sidebar, cx); + focus_sidebar(&sidebar, cx); for _ in 0..7 { cx.dispatch_action(SelectNext); } @@ -1055,11 +1095,10 @@ async fn test_keyboard_confirm_on_view_more_expands(cx: &mut TestAppContext) { async fn test_keyboard_expand_and_collapse_selected_entry(cx: &mut TestAppContext) { let project = init_test_project("/my-project", cx).await; let (multi_workspace, cx) = - cx.add_window_view(|window, cx| MultiWorkspace::test_new(project, window, cx)); + cx.add_window_view(|window, cx| MultiWorkspace::test_new(project.clone(), window, cx)); let sidebar = setup_sidebar(&multi_workspace, cx); - let path_list = PathList::new(&[std::path::PathBuf::from("/my-project")]); - save_n_test_threads(1, &path_list, cx).await; + save_n_test_threads(1, &project, cx).await; multi_workspace.update_in(cx, |_, _window, cx| cx.notify()); cx.run_until_parked(); @@ -1069,7 +1108,7 @@ async fn test_keyboard_expand_and_collapse_selected_entry(cx: &mut TestAppContex ); // Focus sidebar and manually select the header (index 0). Press left to collapse. 
- open_and_focus_sidebar(&sidebar, cx); + focus_sidebar(&sidebar, cx); sidebar.update_in(cx, |sidebar, _window, _cx| { sidebar.selection = Some(0); }); @@ -1100,16 +1139,15 @@ async fn test_keyboard_expand_and_collapse_selected_entry(cx: &mut TestAppContex async fn test_keyboard_collapse_from_child_selects_parent(cx: &mut TestAppContext) { let project = init_test_project("/my-project", cx).await; let (multi_workspace, cx) = - cx.add_window_view(|window, cx| MultiWorkspace::test_new(project, window, cx)); + cx.add_window_view(|window, cx| MultiWorkspace::test_new(project.clone(), window, cx)); let sidebar = setup_sidebar(&multi_workspace, cx); - let path_list = PathList::new(&[std::path::PathBuf::from("/my-project")]); - save_n_test_threads(1, &path_list, cx).await; + save_n_test_threads(1, &project, cx).await; multi_workspace.update_in(cx, |_, _window, cx| cx.notify()); cx.run_until_parked(); // Focus sidebar (selection starts at None), then navigate down to the thread (child) - open_and_focus_sidebar(&sidebar, cx); + focus_sidebar(&sidebar, cx); cx.dispatch_action(SelectNext); cx.dispatch_action(SelectNext); assert_eq!(sidebar.read_with(cx, |s, _| s.selection), Some(1)); @@ -1144,7 +1182,7 @@ async fn test_keyboard_navigation_on_empty_list(cx: &mut TestAppContext) { ); // Focus sidebar — focus_in does not set a selection - open_and_focus_sidebar(&sidebar, cx); + focus_sidebar(&sidebar, cx); assert_eq!(sidebar.read_with(cx, |s, _| s.selection), None); // First SelectNext from None starts at index 0 (header) @@ -1168,16 +1206,15 @@ async fn test_keyboard_navigation_on_empty_list(cx: &mut TestAppContext) { async fn test_selection_clamps_after_entry_removal(cx: &mut TestAppContext) { let project = init_test_project("/my-project", cx).await; let (multi_workspace, cx) = - cx.add_window_view(|window, cx| MultiWorkspace::test_new(project, window, cx)); + cx.add_window_view(|window, cx| MultiWorkspace::test_new(project.clone(), window, cx)); let sidebar = 
setup_sidebar(&multi_workspace, cx); - let path_list = PathList::new(&[std::path::PathBuf::from("/my-project")]); - save_n_test_threads(1, &path_list, cx).await; + save_n_test_threads(1, &project, cx).await; multi_workspace.update_in(cx, |_, _window, cx| cx.notify()); cx.run_until_parked(); // Focus sidebar (selection starts at None), navigate down to the thread (index 1) - open_and_focus_sidebar(&sidebar, cx); + focus_sidebar(&sidebar, cx); cx.dispatch_action(SelectNext); cx.dispatch_action(SelectNext); assert_eq!(sidebar.read_with(cx, |s, _| s.selection), Some(1)); @@ -1245,15 +1282,13 @@ async fn test_parallel_threads_shown_with_live_status(cx: &mut TestAppContext) { cx.add_window_view(|window, cx| MultiWorkspace::test_new(project.clone(), window, cx)); let (sidebar, panel) = setup_sidebar_with_agent_panel(&multi_workspace, cx); - let path_list = PathList::new(&[std::path::PathBuf::from("/my-project")]); - // Open thread A and keep it generating. let connection = StubAgentConnection::new(); open_thread_with_connection(&panel, connection.clone(), cx); send_message(&panel, cx); let session_id_a = active_session_id(&panel, cx); - save_test_thread_metadata(&session_id_a, path_list.clone(), cx).await; + save_test_thread_metadata(&session_id_a, &project, cx).await; cx.update(|_, cx| { connection.send_update( @@ -1272,7 +1307,7 @@ async fn test_parallel_threads_shown_with_live_status(cx: &mut TestAppContext) { send_message(&panel, cx); let session_id_b = active_session_id(&panel, cx); - save_test_thread_metadata(&session_id_b, path_list.clone(), cx).await; + save_test_thread_metadata(&session_id_b, &project, cx).await; cx.run_until_parked(); @@ -1291,15 +1326,13 @@ async fn test_background_thread_completion_triggers_notification(cx: &mut TestAp cx.add_window_view(|window, cx| MultiWorkspace::test_new(project_a.clone(), window, cx)); let (sidebar, panel_a) = setup_sidebar_with_agent_panel(&multi_workspace, cx); - let path_list_a = 
PathList::new(&[std::path::PathBuf::from("/project-a")]); - // Open thread on workspace A and keep it generating. let connection_a = StubAgentConnection::new(); open_thread_with_connection(&panel_a, connection_a.clone(), cx); send_message(&panel_a, cx); let session_id_a = active_session_id(&panel_a, cx); - save_test_thread_metadata(&session_id_a, path_list_a.clone(), cx).await; + save_test_thread_metadata(&session_id_a, &project_a, cx).await; cx.update(|_, cx| { connection_a.send_update( @@ -1349,11 +1382,9 @@ fn type_in_search(sidebar: &Entity, query: &str, cx: &mut gpui::VisualT async fn test_search_narrows_visible_threads_to_matches(cx: &mut TestAppContext) { let project = init_test_project("/my-project", cx).await; let (multi_workspace, cx) = - cx.add_window_view(|window, cx| MultiWorkspace::test_new(project, window, cx)); + cx.add_window_view(|window, cx| MultiWorkspace::test_new(project.clone(), window, cx)); let sidebar = setup_sidebar(&multi_workspace, cx); - let path_list = PathList::new(&[std::path::PathBuf::from("/my-project")]); - for (id, title, hour) in [ ("t-1", "Fix crash in project panel", 3), ("t-2", "Add inline diff view", 2), @@ -1364,7 +1395,7 @@ async fn test_search_narrows_visible_threads_to_matches(cx: &mut TestAppContext) title.into(), chrono::TimeZone::with_ymd_and_hms(&Utc, 2024, 1, 1, hour, 0, 0).unwrap(), None, - path_list.clone(), + &project, cx, ); } @@ -1402,17 +1433,15 @@ async fn test_search_matches_regardless_of_case(cx: &mut TestAppContext) { // Search should match case-insensitively so they can still find it. 
let project = init_test_project("/my-project", cx).await; let (multi_workspace, cx) = - cx.add_window_view(|window, cx| MultiWorkspace::test_new(project, window, cx)); + cx.add_window_view(|window, cx| MultiWorkspace::test_new(project.clone(), window, cx)); let sidebar = setup_sidebar(&multi_workspace, cx); - let path_list = PathList::new(&[std::path::PathBuf::from("/my-project")]); - save_thread_metadata( acp::SessionId::new(Arc::from("thread-1")), "Fix Crash In Project Panel".into(), chrono::TimeZone::with_ymd_and_hms(&Utc, 2024, 1, 1, 0, 0, 0).unwrap(), None, - path_list, + &project, cx, ); cx.run_until_parked(); @@ -1444,18 +1473,16 @@ async fn test_escape_clears_search_and_restores_full_list(cx: &mut TestAppContex // to dismiss the filter and see the full list again. let project = init_test_project("/my-project", cx).await; let (multi_workspace, cx) = - cx.add_window_view(|window, cx| MultiWorkspace::test_new(project, window, cx)); + cx.add_window_view(|window, cx| MultiWorkspace::test_new(project.clone(), window, cx)); let sidebar = setup_sidebar(&multi_workspace, cx); - let path_list = PathList::new(&[std::path::PathBuf::from("/my-project")]); - for (id, title, hour) in [("t-1", "Alpha thread", 2), ("t-2", "Beta thread", 1)] { save_thread_metadata( acp::SessionId::new(Arc::from(id)), title.into(), chrono::TimeZone::with_ymd_and_hms(&Utc, 2024, 1, 1, hour, 0, 0).unwrap(), None, - path_list.clone(), + &project, cx, ) } @@ -1468,7 +1495,7 @@ async fn test_escape_clears_search_and_restores_full_list(cx: &mut TestAppContex ); // User types a search query to filter down. 
- open_and_focus_sidebar(&sidebar, cx); + focus_sidebar(&sidebar, cx); type_in_search(&sidebar, "alpha", cx); assert_eq!( visible_entries_as_strings(&sidebar, cx), @@ -1493,11 +1520,9 @@ async fn test_escape_clears_search_and_restores_full_list(cx: &mut TestAppContex async fn test_search_only_shows_workspace_headers_with_matches(cx: &mut TestAppContext) { let project_a = init_test_project("/project-a", cx).await; let (multi_workspace, cx) = - cx.add_window_view(|window, cx| MultiWorkspace::test_new(project_a, window, cx)); + cx.add_window_view(|window, cx| MultiWorkspace::test_new(project_a.clone(), window, cx)); let sidebar = setup_sidebar(&multi_workspace, cx); - let path_list_a = PathList::new(&[std::path::PathBuf::from("/project-a")]); - for (id, title, hour) in [ ("a1", "Fix bug in sidebar", 2), ("a2", "Add tests for editor", 1), @@ -1507,7 +1532,7 @@ async fn test_search_only_shows_workspace_headers_with_matches(cx: &mut TestAppC title.into(), chrono::TimeZone::with_ymd_and_hms(&Utc, 2024, 1, 1, hour, 0, 0).unwrap(), None, - path_list_a.clone(), + &project_a, cx, ) } @@ -1518,7 +1543,9 @@ async fn test_search_only_shows_workspace_headers_with_matches(cx: &mut TestAppC }); cx.run_until_parked(); - let path_list_b = PathList::new::(&[]); + let project_b = multi_workspace.read_with(cx, |mw, cx| { + mw.workspaces().nth(1).unwrap().read(cx).project().clone() + }); for (id, title, hour) in [ ("b1", "Refactor sidebar layout", 3), @@ -1529,7 +1556,7 @@ async fn test_search_only_shows_workspace_headers_with_matches(cx: &mut TestAppC title.into(), chrono::TimeZone::with_ymd_and_hms(&Utc, 2024, 1, 1, hour, 0, 0).unwrap(), None, - path_list_b.clone(), + &project_b, cx, ) } @@ -1575,11 +1602,9 @@ async fn test_search_only_shows_workspace_headers_with_matches(cx: &mut TestAppC async fn test_search_matches_workspace_name(cx: &mut TestAppContext) { let project_a = init_test_project("/alpha-project", cx).await; let (multi_workspace, cx) = - cx.add_window_view(|window, cx| 
MultiWorkspace::test_new(project_a, window, cx)); + cx.add_window_view(|window, cx| MultiWorkspace::test_new(project_a.clone(), window, cx)); let sidebar = setup_sidebar(&multi_workspace, cx); - let path_list_a = PathList::new(&[std::path::PathBuf::from("/alpha-project")]); - for (id, title, hour) in [ ("a1", "Fix bug in sidebar", 2), ("a2", "Add tests for editor", 1), @@ -1589,7 +1614,7 @@ async fn test_search_matches_workspace_name(cx: &mut TestAppContext) { title.into(), chrono::TimeZone::with_ymd_and_hms(&Utc, 2024, 1, 1, hour, 0, 0).unwrap(), None, - path_list_a.clone(), + &project_a, cx, ) } @@ -1600,7 +1625,9 @@ async fn test_search_matches_workspace_name(cx: &mut TestAppContext) { }); cx.run_until_parked(); - let path_list_b = PathList::new::(&[]); + let project_b = multi_workspace.read_with(cx, |mw, cx| { + mw.workspaces().nth(1).unwrap().read(cx).project().clone() + }); for (id, title, hour) in [ ("b1", "Refactor sidebar layout", 3), @@ -1611,7 +1638,7 @@ async fn test_search_matches_workspace_name(cx: &mut TestAppContext) { title.into(), chrono::TimeZone::with_ymd_and_hms(&Utc, 2024, 1, 1, hour, 0, 0).unwrap(), None, - path_list_b.clone(), + &project_b, cx, ) } @@ -1677,11 +1704,9 @@ async fn test_search_matches_workspace_name(cx: &mut TestAppContext) { async fn test_search_finds_threads_hidden_behind_view_more(cx: &mut TestAppContext) { let project = init_test_project("/my-project", cx).await; let (multi_workspace, cx) = - cx.add_window_view(|window, cx| MultiWorkspace::test_new(project, window, cx)); + cx.add_window_view(|window, cx| MultiWorkspace::test_new(project.clone(), window, cx)); let sidebar = setup_sidebar(&multi_workspace, cx); - let path_list = PathList::new(&[std::path::PathBuf::from("/my-project")]); - // Create 8 threads. The oldest one has a unique name and will be // behind View More (only 5 shown by default). 
for i in 0..8u32 { @@ -1695,7 +1720,7 @@ async fn test_search_finds_threads_hidden_behind_view_more(cx: &mut TestAppConte title.into(), chrono::TimeZone::with_ymd_and_hms(&Utc, 2024, 1, 1, 0, 0, i).unwrap(), None, - path_list.clone(), + &project, cx, ) } @@ -1729,24 +1754,22 @@ async fn test_search_finds_threads_hidden_behind_view_more(cx: &mut TestAppConte async fn test_search_finds_threads_inside_collapsed_groups(cx: &mut TestAppContext) { let project = init_test_project("/my-project", cx).await; let (multi_workspace, cx) = - cx.add_window_view(|window, cx| MultiWorkspace::test_new(project, window, cx)); + cx.add_window_view(|window, cx| MultiWorkspace::test_new(project.clone(), window, cx)); let sidebar = setup_sidebar(&multi_workspace, cx); - let path_list = PathList::new(&[std::path::PathBuf::from("/my-project")]); - save_thread_metadata( acp::SessionId::new(Arc::from("thread-1")), "Important thread".into(), chrono::TimeZone::with_ymd_and_hms(&Utc, 2024, 1, 1, 0, 0, 0).unwrap(), None, - path_list, + &project, cx, ); cx.run_until_parked(); // User focuses the sidebar and collapses the group using keyboard: // manually select the header, then press SelectParent to collapse. 
- open_and_focus_sidebar(&sidebar, cx); + focus_sidebar(&sidebar, cx); sidebar.update_in(cx, |sidebar, _window, _cx| { sidebar.selection = Some(0); }); @@ -1770,11 +1793,9 @@ async fn test_search_finds_threads_inside_collapsed_groups(cx: &mut TestAppConte async fn test_search_then_keyboard_navigate_and_confirm(cx: &mut TestAppContext) { let project = init_test_project("/my-project", cx).await; let (multi_workspace, cx) = - cx.add_window_view(|window, cx| MultiWorkspace::test_new(project, window, cx)); + cx.add_window_view(|window, cx| MultiWorkspace::test_new(project.clone(), window, cx)); let sidebar = setup_sidebar(&multi_workspace, cx); - let path_list = PathList::new(&[std::path::PathBuf::from("/my-project")]); - for (id, title, hour) in [ ("t-1", "Fix crash in panel", 3), ("t-2", "Fix lint warnings", 2), @@ -1785,13 +1806,13 @@ async fn test_search_then_keyboard_navigate_and_confirm(cx: &mut TestAppContext) title.into(), chrono::TimeZone::with_ymd_and_hms(&Utc, 2024, 1, 1, hour, 0, 0).unwrap(), None, - path_list.clone(), + &project, cx, ) } cx.run_until_parked(); - open_and_focus_sidebar(&sidebar, cx); + focus_sidebar(&sidebar, cx); // User types "fix" — two threads match. 
type_in_search(&sidebar, "fix", cx); @@ -1832,7 +1853,7 @@ async fn test_search_then_keyboard_navigate_and_confirm(cx: &mut TestAppContext) async fn test_confirm_on_historical_thread_activates_workspace(cx: &mut TestAppContext) { let project = init_test_project("/my-project", cx).await; let (multi_workspace, cx) = - cx.add_window_view(|window, cx| MultiWorkspace::test_new(project, window, cx)); + cx.add_window_view(|window, cx| MultiWorkspace::test_new(project.clone(), window, cx)); let sidebar = setup_sidebar(&multi_workspace, cx); multi_workspace.update_in(cx, |mw, window, cx| { @@ -1840,14 +1861,19 @@ async fn test_confirm_on_historical_thread_activates_workspace(cx: &mut TestAppC }); cx.run_until_parked(); - let path_list = PathList::new(&[std::path::PathBuf::from("/my-project")]); + let (workspace_0, workspace_1) = multi_workspace.read_with(cx, |mw, _| { + ( + mw.workspaces().next().unwrap().clone(), + mw.workspaces().nth(1).unwrap().clone(), + ) + }); save_thread_metadata( acp::SessionId::new(Arc::from("hist-1")), "Historical Thread".into(), chrono::TimeZone::with_ymd_and_hms(&Utc, 2024, 6, 1, 0, 0, 0).unwrap(), None, - path_list, + &project, cx, ); cx.run_until_parked(); @@ -1861,13 +1887,13 @@ async fn test_confirm_on_historical_thread_activates_workspace(cx: &mut TestAppC // Switch to workspace 1 so we can verify the confirm switches back. multi_workspace.update_in(cx, |mw, window, cx| { - let workspace = mw.workspaces()[1].clone(); + let workspace = mw.workspaces().nth(1).unwrap().clone(); mw.activate(workspace, window, cx); }); cx.run_until_parked(); assert_eq!( - multi_workspace.read_with(cx, |mw, _| mw.active_workspace_index()), - 1 + multi_workspace.read_with(cx, |mw, _| mw.workspace().clone()), + workspace_1 ); // Confirm on the historical (non-live) thread at index 1. 
@@ -1881,8 +1907,8 @@ async fn test_confirm_on_historical_thread_activates_workspace(cx: &mut TestAppC cx.run_until_parked(); assert_eq!( - multi_workspace.read_with(cx, |mw, _| mw.active_workspace_index()), - 0 + multi_workspace.read_with(cx, |mw, _| mw.workspace().clone()), + workspace_0 ); } @@ -1890,17 +1916,15 @@ async fn test_confirm_on_historical_thread_activates_workspace(cx: &mut TestAppC async fn test_click_clears_selection_and_focus_in_restores_it(cx: &mut TestAppContext) { let project = init_test_project("/my-project", cx).await; let (multi_workspace, cx) = - cx.add_window_view(|window, cx| MultiWorkspace::test_new(project, window, cx)); + cx.add_window_view(|window, cx| MultiWorkspace::test_new(project.clone(), window, cx)); let sidebar = setup_sidebar(&multi_workspace, cx); - let path_list = PathList::new(&[std::path::PathBuf::from("/my-project")]); - save_thread_metadata( acp::SessionId::new(Arc::from("t-1")), "Thread A".into(), chrono::TimeZone::with_ymd_and_hms(&Utc, 2024, 1, 2, 0, 0, 0).unwrap(), None, - path_list.clone(), + &project, cx, ); @@ -1909,7 +1933,7 @@ async fn test_click_clears_selection_and_focus_in_restores_it(cx: &mut TestAppCo "Thread B".into(), chrono::TimeZone::with_ymd_and_hms(&Utc, 2024, 1, 1, 0, 0, 0).unwrap(), None, - path_list, + &project, cx, ); @@ -1957,8 +1981,6 @@ async fn test_thread_title_update_propagates_to_sidebar(cx: &mut TestAppContext) cx.add_window_view(|window, cx| MultiWorkspace::test_new(project.clone(), window, cx)); let (sidebar, panel) = setup_sidebar_with_agent_panel(&multi_workspace, cx); - let path_list = PathList::new(&[std::path::PathBuf::from("/my-project")]); - let connection = StubAgentConnection::new(); connection.set_next_prompt_updates(vec![acp::SessionUpdate::AgentMessageChunk( acp::ContentChunk::new("Hi there!".into()), @@ -1967,7 +1989,7 @@ async fn test_thread_title_update_propagates_to_sidebar(cx: &mut TestAppContext) send_message(&panel, cx); let session_id = active_session_id(&panel, cx); 
- save_test_thread_metadata(&session_id, path_list.clone(), cx).await; + save_test_thread_metadata(&session_id, &project, cx).await; cx.run_until_parked(); assert_eq!( @@ -2005,8 +2027,6 @@ async fn test_focused_thread_tracks_user_intent(cx: &mut TestAppContext) { cx.add_window_view(|window, cx| MultiWorkspace::test_new(project_a.clone(), window, cx)); let (sidebar, panel_a) = setup_sidebar_with_agent_panel(&multi_workspace, cx); - let path_list_a = PathList::new(&[std::path::PathBuf::from("/project-a")]); - // Save a thread so it appears in the list. let connection_a = StubAgentConnection::new(); connection_a.set_next_prompt_updates(vec![acp::SessionUpdate::AgentMessageChunk( @@ -2015,7 +2035,7 @@ async fn test_focused_thread_tracks_user_intent(cx: &mut TestAppContext) { open_thread_with_connection(&panel_a, connection_a, cx); send_message(&panel_a, cx); let session_id_a = active_session_id(&panel_a, cx); - save_test_thread_metadata(&session_id_a, path_list_a.clone(), cx).await; + save_test_thread_metadata(&session_id_a, &project_a, cx).await; // Add a second workspace with its own agent panel. let fs = cx.update(|_, cx| ::global(cx)); @@ -2029,7 +2049,8 @@ async fn test_focused_thread_tracks_user_intent(cx: &mut TestAppContext) { let panel_b = add_agent_panel(&workspace_b, cx); cx.run_until_parked(); - let workspace_a = multi_workspace.read_with(cx, |mw, _cx| mw.workspaces()[0].clone()); + let workspace_a = + multi_workspace.read_with(cx, |mw, _cx| mw.workspaces().next().unwrap().clone()); // ── 1. 
Initial state: focused thread derived from active panel ───── sidebar.read_with(cx, |sidebar, _cx| { @@ -2049,6 +2070,7 @@ async fn test_focused_thread_tracks_user_intent(cx: &mut TestAppContext) { updated_at: Utc::now(), created_at: None, folder_paths: PathList::default(), + main_worktree_paths: PathList::default(), archived: false, }, &workspace_a, @@ -2089,8 +2111,7 @@ async fn test_focused_thread_tracks_user_intent(cx: &mut TestAppContext) { open_thread_with_connection(&panel_b, connection_b, cx); send_message(&panel_b, cx); let session_id_b = active_session_id(&panel_b, cx); - let path_list_b = PathList::new(&[std::path::PathBuf::from("/project-b")]); - save_test_thread_metadata(&session_id_b, path_list_b.clone(), cx).await; + save_test_thread_metadata(&session_id_b, &project_b, cx).await; cx.run_until_parked(); // Workspace A is currently active. Click a thread in workspace B, @@ -2104,6 +2125,7 @@ async fn test_focused_thread_tracks_user_intent(cx: &mut TestAppContext) { updated_at: Utc::now(), created_at: None, folder_paths: PathList::default(), + main_worktree_paths: PathList::default(), archived: false, }, &workspace_b, @@ -2126,7 +2148,7 @@ async fn test_focused_thread_tracks_user_intent(cx: &mut TestAppContext) { }); multi_workspace.update_in(cx, |mw, window, cx| { - let workspace = mw.workspaces()[0].clone(); + let workspace = mw.workspaces().next().unwrap().clone(); mw.activate(workspace, window, cx); }); cx.run_until_parked(); @@ -2150,7 +2172,7 @@ async fn test_focused_thread_tracks_user_intent(cx: &mut TestAppContext) { open_thread_with_connection(&panel_b, connection_b2, cx); send_message(&panel_b, cx); let session_id_b2 = active_session_id(&panel_b, cx); - save_test_thread_metadata(&session_id_b2, path_list_b.clone(), cx).await; + save_test_thread_metadata(&session_id_b2, &project_b, cx).await; cx.run_until_parked(); // Panel B is not the active workspace's panel (workspace A is @@ -2181,8 +2203,8 @@ async fn 
test_focused_thread_tracks_user_intent(cx: &mut TestAppContext) { // Switching workspaces via the multi_workspace (simulates clicking // a workspace header) should clear focused_thread. multi_workspace.update_in(cx, |mw, window, cx| { - if let Some(index) = mw.workspaces().iter().position(|w| w == &workspace_b) { - let workspace = mw.workspaces()[index].clone(); + let workspace = mw.workspaces().find(|w| *w == &workspace_b).cloned(); + if let Some(workspace) = workspace { mw.activate(workspace, window, cx); } }); @@ -2232,8 +2254,6 @@ async fn test_new_thread_button_works_after_adding_folder(cx: &mut TestAppContex cx.add_window_view(|window, cx| MultiWorkspace::test_new(project.clone(), window, cx)); let (sidebar, panel) = setup_sidebar_with_agent_panel(&multi_workspace, cx); - let path_list_a = PathList::new(&[std::path::PathBuf::from("/project-a")]); - // Start a thread and send a message so it has history. let connection = StubAgentConnection::new(); connection.set_next_prompt_updates(vec![acp::SessionUpdate::AgentMessageChunk( @@ -2242,7 +2262,7 @@ async fn test_new_thread_button_works_after_adding_folder(cx: &mut TestAppContex open_thread_with_connection(&panel, connection, cx); send_message(&panel, cx); let session_id = active_session_id(&panel, cx); - save_test_thread_metadata(&session_id, path_list_a.clone(), cx).await; + save_test_thread_metadata(&session_id, &project, cx).await; cx.run_until_parked(); // Verify the thread appears in the sidebar. @@ -2276,9 +2296,15 @@ async fn test_new_thread_button_works_after_adding_folder(cx: &mut TestAppContex // The workspace path_list is now [project-a, project-b]. The active // thread's metadata was re-saved with the new paths by the agent panel's // project subscription, so it stays visible under the updated group. + // The old [project-a] group persists in the sidebar (empty) because + // project_group_keys is append-only. 
assert_eq!( visible_entries_as_strings(&sidebar, cx), - vec!["v [project-a, project-b]", " Hello *",] + vec![ + "v [project-a, project-b]", // + " Hello *", + "v [project-a]", + ] ); // The "New Thread" button must still be clickable (not stuck in @@ -2323,8 +2349,6 @@ async fn test_cmd_n_shows_new_thread_entry(cx: &mut TestAppContext) { cx.add_window_view(|window, cx| MultiWorkspace::test_new(project.clone(), window, cx)); let (sidebar, panel) = setup_sidebar_with_agent_panel(&multi_workspace, cx); - let path_list = PathList::new(&[std::path::PathBuf::from("/my-project")]); - // Create a non-empty thread (has messages). let connection = StubAgentConnection::new(); connection.set_next_prompt_updates(vec![acp::SessionUpdate::AgentMessageChunk( @@ -2334,7 +2358,7 @@ async fn test_cmd_n_shows_new_thread_entry(cx: &mut TestAppContext) { send_message(&panel, cx); let session_id = active_session_id(&panel, cx); - save_test_thread_metadata(&session_id, path_list.clone(), cx).await; + save_test_thread_metadata(&session_id, &project, cx).await; cx.run_until_parked(); assert_eq!( @@ -2354,8 +2378,8 @@ async fn test_cmd_n_shows_new_thread_entry(cx: &mut TestAppContext) { assert_eq!( visible_entries_as_strings(&sidebar, cx), - vec!["v [my-project]", " [+ New Thread]", " Hello *"], - "After Cmd-N the sidebar should show a highlighted New Thread entry" + vec!["v [my-project]", " [~ Draft]", " Hello *"], + "After Cmd-N the sidebar should show a highlighted Draft entry" ); sidebar.read_with(cx, |sidebar, _cx| { @@ -2374,8 +2398,6 @@ async fn test_draft_with_server_session_shows_as_draft(cx: &mut TestAppContext) cx.add_window_view(|window, cx| MultiWorkspace::test_new(project.clone(), window, cx)); let (sidebar, panel) = setup_sidebar_with_agent_panel(&multi_workspace, cx); - let path_list = PathList::new(&[std::path::PathBuf::from("/my-project")]); - // Create a saved thread so the workspace has history. 
let connection = StubAgentConnection::new(); connection.set_next_prompt_updates(vec![acp::SessionUpdate::AgentMessageChunk( @@ -2384,7 +2406,7 @@ async fn test_draft_with_server_session_shows_as_draft(cx: &mut TestAppContext) open_thread_with_connection(&panel, connection, cx); send_message(&panel, cx); let saved_session_id = active_session_id(&panel, cx); - save_test_thread_metadata(&saved_session_id, path_list.clone(), cx).await; + save_test_thread_metadata(&saved_session_id, &project, cx).await; cx.run_until_parked(); assert_eq!( @@ -2401,8 +2423,7 @@ async fn test_draft_with_server_session_shows_as_draft(cx: &mut TestAppContext) assert_eq!( visible_entries_as_strings(&sidebar, cx), - vec!["v [my-project]", " [+ New Thread]", " Hello *"], - "Draft with a server session should still show as [+ New Thread]" + vec!["v [my-project]", " [~ Draft]", " Hello *"], ); let workspace = multi_workspace.read_with(cx, |mw, _cx| mw.workspace().clone()); @@ -2435,38 +2456,24 @@ async fn test_cmd_n_shows_new_thread_entry_in_absorbed_worktree(cx: &mut TestApp fs.insert_tree( "/project", serde_json::json!({ - ".git": { - "worktrees": { - "feature-a": { - "commondir": "../../", - "HEAD": "ref: refs/heads/feature-a", - }, - }, - }, + ".git": {}, "src": {}, }), ) .await; // Worktree checkout pointing back to the main repo. 
- fs.insert_tree( - "/wt-feature-a", - serde_json::json!({ - ".git": "gitdir: /project/.git/worktrees/feature-a", - "src": {}, - }), - ) - .await; - - fs.with_git_state(std::path::Path::new("/project/.git"), false, |state| { - state.worktrees.push(git::repository::Worktree { + fs.add_linked_worktree_for_repo( + Path::new("/project/.git"), + false, + git::repository::Worktree { path: std::path::PathBuf::from("/wt-feature-a"), ref_name: Some("refs/heads/feature-a".into()), sha: "aaa".into(), is_main: false, - }); - }) - .unwrap(); + }, + ) + .await; cx.update(|cx| ::set_global(fs.clone(), cx)); @@ -2483,6 +2490,8 @@ async fn test_cmd_n_shows_new_thread_entry_in_absorbed_worktree(cx: &mut TestApp let (multi_workspace, cx) = cx.add_window_view(|window, cx| MultiWorkspace::test_new(main_project.clone(), window, cx)); + let sidebar = setup_sidebar(&multi_workspace, cx); + let worktree_workspace = multi_workspace.update_in(cx, |mw, window, cx| { mw.test_add_workspace(worktree_project.clone(), window, cx) }); @@ -2491,12 +2500,10 @@ async fn test_cmd_n_shows_new_thread_entry_in_absorbed_worktree(cx: &mut TestApp // Switch to the worktree workspace. multi_workspace.update_in(cx, |mw, window, cx| { - let workspace = mw.workspaces()[1].clone(); + let workspace = mw.workspaces().nth(1).unwrap().clone(); mw.activate(workspace, window, cx); }); - let sidebar = setup_sidebar(&multi_workspace, cx); - // Create a non-empty thread in the worktree workspace. 
let connection = StubAgentConnection::new(); connection.set_next_prompt_updates(vec![acp::SessionUpdate::AgentMessageChunk( @@ -2506,17 +2513,12 @@ async fn test_cmd_n_shows_new_thread_entry_in_absorbed_worktree(cx: &mut TestApp send_message(&worktree_panel, cx); let session_id = active_session_id(&worktree_panel, cx); - let wt_path_list = PathList::new(&[std::path::PathBuf::from("/wt-feature-a")]); - save_test_thread_metadata(&session_id, wt_path_list, cx).await; + save_test_thread_metadata(&session_id, &worktree_project, cx).await; cx.run_until_parked(); assert_eq!( visible_entries_as_strings(&sidebar, cx), - vec![ - "v [project]", - " [+ New Thread]", - " Hello {wt-feature-a} *" - ] + vec!["v [project]", " Hello {wt-feature-a} *"] ); // Simulate Cmd-N in the worktree workspace. @@ -2532,12 +2534,11 @@ async fn test_cmd_n_shows_new_thread_entry_in_absorbed_worktree(cx: &mut TestApp visible_entries_as_strings(&sidebar, cx), vec![ "v [project]", - " [+ New Thread]", - " [+ New Thread {wt-feature-a}]", + " [~ Draft {wt-feature-a}]", " Hello {wt-feature-a} *" ], "After Cmd-N in an absorbed worktree, the sidebar should show \ - a highlighted New Thread entry under the main repo header" + a highlighted Draft entry under the main repo header" ); sidebar.read_with(cx, |sidebar, _cx| { @@ -2573,28 +2574,33 @@ async fn test_search_matches_worktree_name(cx: &mut TestAppContext) { let (project, fs) = init_test_project_with_git("/project", cx).await; fs.as_fake() - .with_git_state(std::path::Path::new("/project/.git"), false, |state| { - state.worktrees.push(git::repository::Worktree { + .add_linked_worktree_for_repo( + Path::new("/project/.git"), + false, + git::repository::Worktree { path: std::path::PathBuf::from("/wt/rosewood"), ref_name: Some("refs/heads/rosewood".into()), sha: "abc".into(), is_main: false, - }); - }) - .unwrap(); + }, + ) + .await; project .update(cx, |project, cx| project.git_scans_complete(cx)) .await; + let worktree_project = 
project::Project::test(fs.clone(), ["/wt/rosewood".as_ref()], cx).await; + worktree_project + .update(cx, |p, cx| p.git_scans_complete(cx)) + .await; + let (multi_workspace, cx) = cx.add_window_view(|window, cx| MultiWorkspace::test_new(project.clone(), window, cx)); let sidebar = setup_sidebar(&multi_workspace, cx); - let main_paths = PathList::new(&[std::path::PathBuf::from("/project")]); - let wt_paths = PathList::new(&[std::path::PathBuf::from("/wt/rosewood")]); - save_named_thread_metadata("main-t", "Unrelated Thread", &main_paths, cx).await; - save_named_thread_metadata("wt-t", "Fix Bug", &wt_paths, cx).await; + save_named_thread_metadata("main-t", "Unrelated Thread", &project, cx).await; + save_named_thread_metadata("wt-t", "Fix Bug", &worktree_project, cx).await; multi_workspace.update_in(cx, |_, _window, cx| cx.notify()); cx.run_until_parked(); @@ -2616,13 +2622,17 @@ async fn test_git_worktree_added_live_updates_sidebar(cx: &mut TestAppContext) { .update(cx, |project, cx| project.git_scans_complete(cx)) .await; + let worktree_project = project::Project::test(fs.clone(), ["/wt/rosewood".as_ref()], cx).await; + worktree_project + .update(cx, |p, cx| p.git_scans_complete(cx)) + .await; + let (multi_workspace, cx) = cx.add_window_view(|window, cx| MultiWorkspace::test_new(project.clone(), window, cx)); let sidebar = setup_sidebar(&multi_workspace, cx); // Save a thread against a worktree path that doesn't exist yet. - let wt_paths = PathList::new(&[std::path::PathBuf::from("/wt/rosewood")]); - save_named_thread_metadata("wt-thread", "Worktree Thread", &wt_paths, cx).await; + save_named_thread_metadata("wt-thread", "Worktree Thread", &worktree_project, cx).await; multi_workspace.update_in(cx, |_, _window, cx| cx.notify()); cx.run_until_parked(); @@ -2635,25 +2645,23 @@ async fn test_git_worktree_added_live_updates_sidebar(cx: &mut TestAppContext) { // Now add the worktree to the git state and trigger a rescan. 
fs.as_fake() - .with_git_state(std::path::Path::new("/project/.git"), true, |state| { - state.worktrees.push(git::repository::Worktree { + .add_linked_worktree_for_repo( + Path::new("/project/.git"), + true, + git::repository::Worktree { path: std::path::PathBuf::from("/wt/rosewood"), ref_name: Some("refs/heads/rosewood".into()), sha: "abc".into(), is_main: false, - }); - }) - .unwrap(); + }, + ) + .await; cx.run_until_parked(); assert_eq!( visible_entries_as_strings(&sidebar, cx), - vec![ - "v [project]", - " [+ New Thread]", - " Worktree Thread {rosewood}", - ] + vec!["v [project]", " Worktree Thread {rosewood}",] ); } @@ -2667,16 +2675,6 @@ async fn test_two_worktree_workspaces_absorbed_when_main_added(cx: &mut TestAppC "/project", serde_json::json!({ ".git": { - "worktrees": { - "feature-a": { - "commondir": "../../", - "HEAD": "ref: refs/heads/feature-a", - }, - "feature-b": { - "commondir": "../../", - "HEAD": "ref: refs/heads/feature-b", - }, - }, }, "src": {}, }), @@ -2684,20 +2682,26 @@ async fn test_two_worktree_workspaces_absorbed_when_main_added(cx: &mut TestAppC .await; // Two worktree checkouts whose .git files point back to the main repo. 
- fs.insert_tree( - "/wt-feature-a", - serde_json::json!({ - ".git": "gitdir: /project/.git/worktrees/feature-a", - "src": {}, - }), + fs.add_linked_worktree_for_repo( + Path::new("/project/.git"), + false, + git::repository::Worktree { + path: std::path::PathBuf::from("/wt-feature-a"), + ref_name: Some("refs/heads/feature-a".into()), + sha: "aaa".into(), + is_main: false, + }, ) .await; - fs.insert_tree( - "/wt-feature-b", - serde_json::json!({ - ".git": "gitdir: /project/.git/worktrees/feature-b", - "src": {}, - }), + fs.add_linked_worktree_for_repo( + Path::new("/project/.git"), + false, + git::repository::Worktree { + path: std::path::PathBuf::from("/wt-feature-b"), + ref_name: Some("refs/heads/feature-b".into()), + sha: "bbb".into(), + is_main: false, + }, ) .await; @@ -2717,10 +2721,8 @@ async fn test_two_worktree_workspaces_absorbed_when_main_added(cx: &mut TestAppC }); let sidebar = setup_sidebar(&multi_workspace, cx); - let paths_a = PathList::new(&[std::path::PathBuf::from("/wt-feature-a")]); - let paths_b = PathList::new(&[std::path::PathBuf::from("/wt-feature-b")]); - save_named_thread_metadata("thread-a", "Thread A", &paths_a, cx).await; - save_named_thread_metadata("thread-b", "Thread B", &paths_b, cx).await; + save_named_thread_metadata("thread-a", "Thread A", &project_a, cx).await; + save_named_thread_metadata("thread-b", "Thread B", &project_b, cx).await; multi_workspace.update_in(cx, |_, _window, cx| cx.notify()); cx.run_until_parked(); @@ -2735,24 +2737,6 @@ async fn test_two_worktree_workspaces_absorbed_when_main_added(cx: &mut TestAppC ] ); - // Configure the main repo to list both worktrees before opening - // it so the initial git scan picks them up. 
- fs.with_git_state(std::path::Path::new("/project/.git"), false, |state| { - state.worktrees.push(git::repository::Worktree { - path: std::path::PathBuf::from("/wt-feature-a"), - ref_name: Some("refs/heads/feature-a".into()), - sha: "aaa".into(), - is_main: false, - }); - state.worktrees.push(git::repository::Worktree { - path: std::path::PathBuf::from("/wt-feature-b"), - ref_name: Some("refs/heads/feature-b".into()), - sha: "bbb".into(), - is_main: false, - }); - }) - .unwrap(); - let main_project = project::Project::test(fs.clone(), ["/project".as_ref()], cx).await; main_project .update(cx, |p, cx| p.git_scans_complete(cx)) @@ -2769,7 +2753,6 @@ async fn test_two_worktree_workspaces_absorbed_when_main_added(cx: &mut TestAppC visible_entries_as_strings(&sidebar, cx), vec![ "v [project]", - " [+ New Thread]", " Thread A {wt-feature-a}", " Thread B {wt-feature-b}", ] @@ -2788,54 +2771,33 @@ async fn test_threadless_workspace_shows_new_thread_with_worktree_chip(cx: &mut fs.insert_tree( "/project", serde_json::json!({ - ".git": { - "worktrees": { - "feature-a": { - "commondir": "../../", - "HEAD": "ref: refs/heads/feature-a", - }, - "feature-b": { - "commondir": "../../", - "HEAD": "ref: refs/heads/feature-b", - }, - }, - }, - "src": {}, - }), - ) - .await; - fs.insert_tree( - "/wt-feature-a", - serde_json::json!({ - ".git": "gitdir: /project/.git/worktrees/feature-a", - "src": {}, - }), - ) - .await; - fs.insert_tree( - "/wt-feature-b", - serde_json::json!({ - ".git": "gitdir: /project/.git/worktrees/feature-b", + ".git": {}, "src": {}, }), ) .await; - - fs.with_git_state(std::path::Path::new("/project/.git"), false, |state| { - state.worktrees.push(git::repository::Worktree { + fs.add_linked_worktree_for_repo( + Path::new("/project/.git"), + false, + git::repository::Worktree { path: std::path::PathBuf::from("/wt-feature-a"), ref_name: Some("refs/heads/feature-a".into()), sha: "aaa".into(), is_main: false, - }); - state.worktrees.push(git::repository::Worktree { + 
}, + ) + .await; + fs.add_linked_worktree_for_repo( + Path::new("/project/.git"), + false, + git::repository::Worktree { path: std::path::PathBuf::from("/wt-feature-b"), ref_name: Some("refs/heads/feature-b".into()), sha: "bbb".into(), is_main: false, - }); - }) - .unwrap(); + }, + ) + .await; cx.update(|cx| ::set_global(fs.clone(), cx)); @@ -2855,8 +2817,7 @@ async fn test_threadless_workspace_shows_new_thread_with_worktree_chip(cx: &mut let sidebar = setup_sidebar(&multi_workspace, cx); // Only save a thread for workspace A. - let paths_a = PathList::new(&[std::path::PathBuf::from("/wt-feature-a")]); - save_named_thread_metadata("thread-a", "Thread A", &paths_a, cx).await; + save_named_thread_metadata("thread-a", "Thread A", &project_a, cx).await; multi_workspace.update_in(cx, |_, _window, cx| cx.notify()); cx.run_until_parked(); @@ -2884,18 +2845,7 @@ async fn test_multi_worktree_thread_shows_multiple_chips(cx: &mut TestAppContext fs.insert_tree( "/project_a", serde_json::json!({ - ".git": { - "worktrees": { - "olivetti": { - "commondir": "../../", - "HEAD": "ref: refs/heads/olivetti", - }, - "selectric": { - "commondir": "../../", - "HEAD": "ref: refs/heads/selectric", - }, - }, - }, + ".git": {}, "src": {}, }), ) @@ -2903,56 +2853,28 @@ async fn test_multi_worktree_thread_shows_multiple_chips(cx: &mut TestAppContext fs.insert_tree( "/project_b", serde_json::json!({ - ".git": { - "worktrees": { - "olivetti": { - "commondir": "../../", - "HEAD": "ref: refs/heads/olivetti", - }, - "selectric": { - "commondir": "../../", - "HEAD": "ref: refs/heads/selectric", - }, - }, - }, + ".git": {}, "src": {}, }), ) .await; // Worktree checkouts. 
- for (repo, branch) in &[ - ("project_a", "olivetti"), - ("project_a", "selectric"), - ("project_b", "olivetti"), - ("project_b", "selectric"), - ] { - let worktree_path = format!("/worktrees/{repo}/{branch}/{repo}"); - let gitdir = format!("gitdir: /{repo}/.git/worktrees/{branch}"); - fs.insert_tree( - &worktree_path, - serde_json::json!({ - ".git": gitdir, - "src": {}, - }), - ) - .await; - } - - // Register linked worktrees. for repo in &["project_a", "project_b"] { let git_path = format!("/{repo}/.git"); - fs.with_git_state(std::path::Path::new(&git_path), false, |state| { - for branch in &["olivetti", "selectric"] { - state.worktrees.push(git::repository::Worktree { + for branch in &["olivetti", "selectric"] { + fs.add_linked_worktree_for_repo( + Path::new(&git_path), + false, + git::repository::Worktree { path: std::path::PathBuf::from(format!("/worktrees/{repo}/{branch}/{repo}")), ref_name: Some(format!("refs/heads/{branch}").into()), sha: "aaa".into(), is_main: false, - }); - } - }) - .unwrap(); + }, + ) + .await; + } } cx.update(|cx| ::set_global(fs.clone(), cx)); @@ -2975,11 +2897,7 @@ async fn test_multi_worktree_thread_shows_multiple_chips(cx: &mut TestAppContext let sidebar = setup_sidebar(&multi_workspace, cx); // Save a thread under the same paths as the workspace roots. 
- let thread_paths = PathList::new(&[ - std::path::PathBuf::from("/worktrees/project_a/olivetti/project_a"), - std::path::PathBuf::from("/worktrees/project_b/selectric/project_b"), - ]); - save_named_thread_metadata("wt-thread", "Cross Worktree Thread", &thread_paths, cx).await; + save_named_thread_metadata("wt-thread", "Cross Worktree Thread", &project, cx).await; multi_workspace.update_in(cx, |_, _window, cx| cx.notify()); cx.run_until_parked(); @@ -3005,14 +2923,7 @@ async fn test_same_named_worktree_chips_are_deduplicated(cx: &mut TestAppContext fs.insert_tree( "/project_a", serde_json::json!({ - ".git": { - "worktrees": { - "olivetti": { - "commondir": "../../", - "HEAD": "ref: refs/heads/olivetti", - }, - }, - }, + ".git": {}, "src": {}, }), ) @@ -3020,41 +2931,25 @@ async fn test_same_named_worktree_chips_are_deduplicated(cx: &mut TestAppContext fs.insert_tree( "/project_b", serde_json::json!({ - ".git": { - "worktrees": { - "olivetti": { - "commondir": "../../", - "HEAD": "ref: refs/heads/olivetti", - }, - }, - }, + ".git": {}, "src": {}, }), ) .await; for repo in &["project_a", "project_b"] { - let worktree_path = format!("/worktrees/{repo}/olivetti/{repo}"); - let gitdir = format!("gitdir: /{repo}/.git/worktrees/olivetti"); - fs.insert_tree( - &worktree_path, - serde_json::json!({ - ".git": gitdir, - "src": {}, - }), - ) - .await; - let git_path = format!("/{repo}/.git"); - fs.with_git_state(std::path::Path::new(&git_path), false, |state| { - state.worktrees.push(git::repository::Worktree { + fs.add_linked_worktree_for_repo( + Path::new(&git_path), + false, + git::repository::Worktree { path: std::path::PathBuf::from(format!("/worktrees/{repo}/olivetti/{repo}")), ref_name: Some("refs/heads/olivetti".into()), sha: "aaa".into(), is_main: false, - }); - }) - .unwrap(); + }, + ) + .await; } cx.update(|cx| ::set_global(fs.clone(), cx)); @@ -3075,11 +2970,7 @@ async fn test_same_named_worktree_chips_are_deduplicated(cx: &mut TestAppContext let sidebar = 
setup_sidebar(&multi_workspace, cx); // Thread with roots in both repos' "olivetti" worktrees. - let thread_paths = PathList::new(&[ - std::path::PathBuf::from("/worktrees/project_a/olivetti/project_a"), - std::path::PathBuf::from("/worktrees/project_b/olivetti/project_b"), - ]); - save_named_thread_metadata("wt-thread", "Same Branch Thread", &thread_paths, cx).await; + save_named_thread_metadata("wt-thread", "Same Branch Thread", &project, cx).await; multi_workspace.update_in(cx, |_, _window, cx| cx.notify()); cx.run_until_parked(); @@ -3114,38 +3005,24 @@ async fn test_absorbed_worktree_running_thread_shows_live_status(cx: &mut TestAp fs.insert_tree( "/project", serde_json::json!({ - ".git": { - "worktrees": { - "feature-a": { - "commondir": "../../", - "HEAD": "ref: refs/heads/feature-a", - }, - }, - }, + ".git": {}, "src": {}, }), ) .await; // Worktree checkout pointing back to the main repo. - fs.insert_tree( - "/wt-feature-a", - serde_json::json!({ - ".git": "gitdir: /project/.git/worktrees/feature-a", - "src": {}, - }), - ) - .await; - - fs.with_git_state(std::path::Path::new("/project/.git"), false, |state| { - state.worktrees.push(git::repository::Worktree { + fs.add_linked_worktree_for_repo( + Path::new("/project/.git"), + false, + git::repository::Worktree { path: std::path::PathBuf::from("/wt-feature-a"), ref_name: Some("refs/heads/feature-a".into()), sha: "aaa".into(), is_main: false, - }); - }) - .unwrap(); + }, + ) + .await; cx.update(|cx| ::set_global(fs.clone(), cx)); @@ -3163,6 +3040,8 @@ async fn test_absorbed_worktree_running_thread_shows_live_status(cx: &mut TestAp let (multi_workspace, cx) = cx.add_window_view(|window, cx| MultiWorkspace::test_new(main_project.clone(), window, cx)); + let sidebar = setup_sidebar(&multi_workspace, cx); + let worktree_workspace = multi_workspace.update_in(cx, |mw, window, cx| { mw.test_add_workspace(worktree_project.clone(), window, cx) }); @@ -3173,12 +3052,10 @@ async fn 
test_absorbed_worktree_running_thread_shows_live_status(cx: &mut TestAp // Switch back to the main workspace before setting up the sidebar. multi_workspace.update_in(cx, |mw, window, cx| { - let workspace = mw.workspaces()[0].clone(); + let workspace = mw.workspaces().next().unwrap().clone(); mw.activate(workspace, window, cx); }); - let sidebar = setup_sidebar(&multi_workspace, cx); - // Start a thread in the worktree workspace's panel and keep it // generating (don't resolve it). let connection = StubAgentConnection::new(); @@ -3188,8 +3065,7 @@ async fn test_absorbed_worktree_running_thread_shows_live_status(cx: &mut TestAp let session_id = active_session_id(&worktree_panel, cx); // Save metadata so the sidebar knows about this thread. - let wt_paths = PathList::new(&[std::path::PathBuf::from("/wt-feature-a")]); - save_test_thread_metadata(&session_id, wt_paths, cx).await; + save_test_thread_metadata(&session_id, &worktree_project, cx).await; // Keep the thread generating by sending a chunk without ending // the turn. 
@@ -3209,7 +3085,7 @@ async fn test_absorbed_worktree_running_thread_shows_live_status(cx: &mut TestAp entries, vec![ "v [project]", - " [+ New Thread]", + " [~ Draft]", " Hello {wt-feature-a} * (running)", ] ); @@ -3231,37 +3107,23 @@ async fn test_absorbed_worktree_completion_triggers_notification(cx: &mut TestAp fs.insert_tree( "/project", serde_json::json!({ - ".git": { - "worktrees": { - "feature-a": { - "commondir": "../../", - "HEAD": "ref: refs/heads/feature-a", - }, - }, - }, - "src": {}, - }), - ) - .await; - - fs.insert_tree( - "/wt-feature-a", - serde_json::json!({ - ".git": "gitdir: /project/.git/worktrees/feature-a", + ".git": {}, "src": {}, }), ) .await; - fs.with_git_state(std::path::Path::new("/project/.git"), false, |state| { - state.worktrees.push(git::repository::Worktree { + fs.add_linked_worktree_for_repo( + Path::new("/project/.git"), + false, + git::repository::Worktree { path: std::path::PathBuf::from("/wt-feature-a"), ref_name: Some("refs/heads/feature-a".into()), sha: "aaa".into(), is_main: false, - }); - }) - .unwrap(); + }, + ) + .await; cx.update(|cx| ::set_global(fs.clone(), cx)); @@ -3278,6 +3140,8 @@ async fn test_absorbed_worktree_completion_triggers_notification(cx: &mut TestAp let (multi_workspace, cx) = cx.add_window_view(|window, cx| MultiWorkspace::test_new(main_project.clone(), window, cx)); + let sidebar = setup_sidebar(&multi_workspace, cx); + let worktree_workspace = multi_workspace.update_in(cx, |mw, window, cx| { mw.test_add_workspace(worktree_project.clone(), window, cx) }); @@ -3285,19 +3149,16 @@ async fn test_absorbed_worktree_completion_triggers_notification(cx: &mut TestAp let worktree_panel = add_agent_panel(&worktree_workspace, cx); multi_workspace.update_in(cx, |mw, window, cx| { - let workspace = mw.workspaces()[0].clone(); + let workspace = mw.workspaces().next().unwrap().clone(); mw.activate(workspace, window, cx); }); - let sidebar = setup_sidebar(&multi_workspace, cx); - let connection = 
StubAgentConnection::new(); open_thread_with_connection(&worktree_panel, connection.clone(), cx); send_message(&worktree_panel, cx); let session_id = active_session_id(&worktree_panel, cx); - let wt_paths = PathList::new(&[std::path::PathBuf::from("/wt-feature-a")]); - save_test_thread_metadata(&session_id, wt_paths, cx).await; + save_test_thread_metadata(&session_id, &worktree_project, cx).await; cx.update(|_, cx| { connection.send_update( @@ -3312,7 +3173,7 @@ async fn test_absorbed_worktree_completion_triggers_notification(cx: &mut TestAp visible_entries_as_strings(&sidebar, cx), vec![ "v [project]", - " [+ New Thread]", + " [~ Draft]", " Hello {wt-feature-a} * (running)", ] ); @@ -3322,11 +3183,7 @@ async fn test_absorbed_worktree_completion_triggers_notification(cx: &mut TestAp assert_eq!( visible_entries_as_strings(&sidebar, cx), - vec![ - "v [project]", - " [+ New Thread]", - " Hello {wt-feature-a} * (!)", - ] + vec!["v [project]", " [~ Draft]", " Hello {wt-feature-a} * (!)",] ); } @@ -3338,37 +3195,23 @@ async fn test_clicking_worktree_thread_opens_workspace_when_none_exists(cx: &mut fs.insert_tree( "/project", serde_json::json!({ - ".git": { - "worktrees": { - "feature-a": { - "commondir": "../../", - "HEAD": "ref: refs/heads/feature-a", - }, - }, - }, - "src": {}, - }), - ) - .await; - - fs.insert_tree( - "/wt-feature-a", - serde_json::json!({ - ".git": "gitdir: /project/.git/worktrees/feature-a", + ".git": {}, "src": {}, }), ) .await; - fs.with_git_state(std::path::Path::new("/project/.git"), false, |state| { - state.worktrees.push(git::repository::Worktree { + fs.add_linked_worktree_for_repo( + Path::new("/project/.git"), + false, + git::repository::Worktree { path: std::path::PathBuf::from("/wt-feature-a"), ref_name: Some("refs/heads/feature-a".into()), sha: "aaa".into(), is_main: false, - }); - }) - .unwrap(); + }, + ) + .await; cx.update(|cx| ::set_global(fs.clone(), cx)); @@ -3378,13 +3221,17 @@ async fn 
test_clicking_worktree_thread_opens_workspace_when_none_exists(cx: &mut .update(cx, |p, cx| p.git_scans_complete(cx)) .await; + let worktree_project = project::Project::test(fs.clone(), ["/wt-feature-a".as_ref()], cx).await; + worktree_project + .update(cx, |p, cx| p.git_scans_complete(cx)) + .await; + let (multi_workspace, cx) = cx.add_window_view(|window, cx| MultiWorkspace::test_new(main_project.clone(), window, cx)); let sidebar = setup_sidebar(&multi_workspace, cx); // Save a thread for the worktree path (no workspace for it). - let paths_wt = PathList::new(&[std::path::PathBuf::from("/wt-feature-a")]); - save_named_thread_metadata("thread-wt", "WT Thread", &paths_wt, cx).await; + save_named_thread_metadata("thread-wt", "WT Thread", &worktree_project, cx).await; multi_workspace.update_in(cx, |_, _window, cx| cx.notify()); cx.run_until_parked(); @@ -3392,23 +3239,19 @@ async fn test_clicking_worktree_thread_opens_workspace_when_none_exists(cx: &mut // Thread should appear under the main repo with a worktree chip. assert_eq!( visible_entries_as_strings(&sidebar, cx), - vec![ - "v [project]", - " [+ New Thread]", - " WT Thread {wt-feature-a}" - ], + vec!["v [project]", " WT Thread {wt-feature-a}"], ); // Only 1 workspace should exist. assert_eq!( - multi_workspace.read_with(cx, |mw, _| mw.workspaces().len()), + multi_workspace.read_with(cx, |mw, _| mw.workspaces().count()), 1, ); // Focus the sidebar and select the worktree thread. - open_and_focus_sidebar(&sidebar, cx); + focus_sidebar(&sidebar, cx); sidebar.update_in(cx, |sidebar, _window, _cx| { - sidebar.selection = Some(2); // index 0 is header, 1 is new thread, 2 is the thread + sidebar.selection = Some(1); // index 0 is header, 1 is the thread }); // Confirm to open the worktree thread. @@ -3418,11 +3261,11 @@ async fn test_clicking_worktree_thread_opens_workspace_when_none_exists(cx: &mut // A new workspace should have been created for the worktree path. 
let new_workspace = multi_workspace.read_with(cx, |mw, _| { assert_eq!( - mw.workspaces().len(), + mw.workspaces().count(), 2, "confirming a worktree thread without a workspace should open one", ); - mw.workspaces()[1].clone() + mw.workspaces().nth(1).unwrap().clone() }); let new_path_list = @@ -3444,37 +3287,23 @@ async fn test_clicking_worktree_thread_does_not_briefly_render_as_separate_proje fs.insert_tree( "/project", serde_json::json!({ - ".git": { - "worktrees": { - "feature-a": { - "commondir": "../../", - "HEAD": "ref: refs/heads/feature-a", - }, - }, - }, - "src": {}, - }), - ) - .await; - - fs.insert_tree( - "/wt-feature-a", - serde_json::json!({ - ".git": "gitdir: /project/.git/worktrees/feature-a", + ".git": {}, "src": {}, }), ) .await; - fs.with_git_state(std::path::Path::new("/project/.git"), false, |state| { - state.worktrees.push(git::repository::Worktree { + fs.add_linked_worktree_for_repo( + Path::new("/project/.git"), + false, + git::repository::Worktree { path: std::path::PathBuf::from("/wt-feature-a"), ref_name: Some("refs/heads/feature-a".into()), sha: "aaa".into(), is_main: false, - }); - }) - .unwrap(); + }, + ) + .await; cx.update(|cx| ::set_global(fs.clone(), cx)); @@ -3483,28 +3312,28 @@ async fn test_clicking_worktree_thread_does_not_briefly_render_as_separate_proje .update(cx, |p, cx| p.git_scans_complete(cx)) .await; + let worktree_project = project::Project::test(fs.clone(), ["/wt-feature-a".as_ref()], cx).await; + worktree_project + .update(cx, |p, cx| p.git_scans_complete(cx)) + .await; + let (multi_workspace, cx) = cx.add_window_view(|window, cx| MultiWorkspace::test_new(main_project.clone(), window, cx)); let sidebar = setup_sidebar(&multi_workspace, cx); - let paths_wt = PathList::new(&[std::path::PathBuf::from("/wt-feature-a")]); - save_named_thread_metadata("thread-wt", "WT Thread", &paths_wt, cx).await; + save_named_thread_metadata("thread-wt", "WT Thread", &worktree_project, cx).await; multi_workspace.update_in(cx, |_, 
_window, cx| cx.notify()); cx.run_until_parked(); assert_eq!( visible_entries_as_strings(&sidebar, cx), - vec![ - "v [project]", - " [+ New Thread]", - " WT Thread {wt-feature-a}" - ], + vec!["v [project]", " WT Thread {wt-feature-a}"], ); - open_and_focus_sidebar(&sidebar, cx); + focus_sidebar(&sidebar, cx); sidebar.update_in(cx, |sidebar, _window, _cx| { - sidebar.selection = Some(2); + sidebar.selection = Some(1); // index 0 is header, 1 is the thread }); let assert_sidebar_state = |sidebar: &mut Sidebar, _cx: &mut Context| { @@ -3560,7 +3389,7 @@ async fn test_clicking_worktree_thread_does_not_briefly_render_as_separate_proje ListEntry::ViewMore { .. } => { panic!("unexpected `View More` entry while opening linked worktree thread"); } - ListEntry::NewThread { .. } => {} + ListEntry::DraftThread { .. } | ListEntry::NewThread { .. } => {} } } @@ -3595,37 +3424,23 @@ async fn test_clicking_absorbed_worktree_thread_activates_worktree_workspace( fs.insert_tree( "/project", serde_json::json!({ - ".git": { - "worktrees": { - "feature-a": { - "commondir": "../../", - "HEAD": "ref: refs/heads/feature-a", - }, - }, - }, - "src": {}, - }), - ) - .await; - - fs.insert_tree( - "/wt-feature-a", - serde_json::json!({ - ".git": "gitdir: /project/.git/worktrees/feature-a", + ".git": {}, "src": {}, }), ) .await; - fs.with_git_state(std::path::Path::new("/project/.git"), false, |state| { - state.worktrees.push(git::repository::Worktree { + fs.add_linked_worktree_for_repo( + Path::new("/project/.git"), + false, + git::repository::Worktree { path: std::path::PathBuf::from("/wt-feature-a"), ref_name: Some("refs/heads/feature-a".into()), sha: "aaa".into(), is_main: false, - }); - }) - .unwrap(); + }, + ) + .await; cx.update(|cx| ::set_global(fs.clone(), cx)); @@ -3642,22 +3457,21 @@ async fn test_clicking_absorbed_worktree_thread_activates_worktree_workspace( let (multi_workspace, cx) = cx.add_window_view(|window, cx| MultiWorkspace::test_new(main_project.clone(), window, cx)); + let 
sidebar = setup_sidebar(&multi_workspace, cx); + let worktree_workspace = multi_workspace.update_in(cx, |mw, window, cx| { mw.test_add_workspace(worktree_project.clone(), window, cx) }); // Activate the main workspace before setting up the sidebar. - multi_workspace.update_in(cx, |mw, window, cx| { - let workspace = mw.workspaces()[0].clone(); - mw.activate(workspace, window, cx); + let main_workspace = multi_workspace.update_in(cx, |mw, window, cx| { + let workspace = mw.workspaces().next().unwrap().clone(); + mw.activate(workspace.clone(), window, cx); + workspace }); - let sidebar = setup_sidebar(&multi_workspace, cx); - - let paths_main = PathList::new(&[std::path::PathBuf::from("/project")]); - let paths_wt = PathList::new(&[std::path::PathBuf::from("/wt-feature-a")]); - save_named_thread_metadata("thread-main", "Main Thread", &paths_main, cx).await; - save_named_thread_metadata("thread-wt", "WT Thread", &paths_wt, cx).await; + save_named_thread_metadata("thread-main", "Main Thread", &main_project, cx).await; + save_named_thread_metadata("thread-wt", "WT Thread", &worktree_project, cx).await; multi_workspace.update_in(cx, |_, _window, cx| cx.notify()); cx.run_until_parked(); @@ -3675,13 +3489,13 @@ async fn test_clicking_absorbed_worktree_thread_activates_worktree_workspace( .expect("should find the worktree thread entry"); assert_eq!( - multi_workspace.read_with(cx, |mw, _| mw.active_workspace_index()), - 0, + multi_workspace.read_with(cx, |mw, _| mw.workspace().clone()), + main_workspace, "main workspace should be active initially" ); // Focus the sidebar and select the absorbed worktree thread. - open_and_focus_sidebar(&sidebar, cx); + focus_sidebar(&sidebar, cx); sidebar.update_in(cx, |sidebar, _window, _cx| { sidebar.selection = Some(wt_thread_index); }); @@ -3691,9 +3505,7 @@ async fn test_clicking_absorbed_worktree_thread_activates_worktree_workspace( cx.run_until_parked(); // The worktree workspace should now be active, not the main one. 
- let active_workspace = multi_workspace.read_with(cx, |mw, _| { - mw.workspaces()[mw.active_workspace_index()].clone() - }); + let active_workspace = multi_workspace.read_with(cx, |mw, _| mw.workspace().clone()); assert_eq!( active_workspace, worktree_workspace, "clicking an absorbed worktree thread should activate the worktree workspace" @@ -3718,28 +3530,29 @@ async fn test_activate_archived_thread_with_saved_paths_activates_matching_works let project_b = project::Project::test(fs.clone(), ["/project-b".as_ref()], cx).await; let (multi_workspace, cx) = - cx.add_window_view(|window, cx| MultiWorkspace::test_new(project_a, window, cx)); - - multi_workspace.update_in(cx, |mw, window, cx| { - mw.test_add_workspace(project_b, window, cx); - }); + cx.add_window_view(|window, cx| MultiWorkspace::test_new(project_a.clone(), window, cx)); let sidebar = setup_sidebar(&multi_workspace, cx); + let workspace_b = multi_workspace.update_in(cx, |mw, window, cx| { + mw.test_add_workspace(project_b.clone(), window, cx) + }); + let workspace_a = + multi_workspace.read_with(cx, |mw, _| mw.workspaces().next().unwrap().clone()); + // Save a thread with path_list pointing to project-b. - let path_list_b = PathList::new(&[std::path::PathBuf::from("/project-b")]); let session_id = acp::SessionId::new(Arc::from("archived-1")); - save_test_thread_metadata(&session_id, path_list_b.clone(), cx).await; + save_test_thread_metadata(&session_id, &project_b, cx).await; // Ensure workspace A is active. 
multi_workspace.update_in(cx, |mw, window, cx| { - let workspace = mw.workspaces()[0].clone(); + let workspace = mw.workspaces().next().unwrap().clone(); mw.activate(workspace, window, cx); }); cx.run_until_parked(); assert_eq!( - multi_workspace.read_with(cx, |mw, _| mw.active_workspace_index()), - 0 + multi_workspace.read_with(cx, |mw, _| mw.workspace().clone()), + workspace_a ); // Call activate_archived_thread – should resolve saved paths and @@ -3753,6 +3566,7 @@ async fn test_activate_archived_thread_with_saved_paths_activates_matching_works updated_at: Utc::now(), created_at: None, folder_paths: PathList::new(&[PathBuf::from("/project-b")]), + main_worktree_paths: PathList::default(), archived: false, }, window, @@ -3762,8 +3576,8 @@ async fn test_activate_archived_thread_with_saved_paths_activates_matching_works cx.run_until_parked(); assert_eq!( - multi_workspace.read_with(cx, |mw, _| mw.active_workspace_index()), - 1, + multi_workspace.read_with(cx, |mw, _| mw.workspace().clone()), + workspace_b, "should have activated the workspace matching the saved path_list" ); } @@ -3788,21 +3602,23 @@ async fn test_activate_archived_thread_cwd_fallback_with_matching_workspace( let (multi_workspace, cx) = cx.add_window_view(|window, cx| MultiWorkspace::test_new(project_a, window, cx)); - multi_workspace.update_in(cx, |mw, window, cx| { - mw.test_add_workspace(project_b, window, cx); - }); - let sidebar = setup_sidebar(&multi_workspace, cx); + let workspace_b = multi_workspace.update_in(cx, |mw, window, cx| { + mw.test_add_workspace(project_b, window, cx) + }); + let workspace_a = + multi_workspace.read_with(cx, |mw, _| mw.workspaces().next().unwrap().clone()); + // Start with workspace A active. 
multi_workspace.update_in(cx, |mw, window, cx| { - let workspace = mw.workspaces()[0].clone(); + let workspace = mw.workspaces().next().unwrap().clone(); mw.activate(workspace, window, cx); }); cx.run_until_parked(); assert_eq!( - multi_workspace.read_with(cx, |mw, _| mw.active_workspace_index()), - 0 + multi_workspace.read_with(cx, |mw, _| mw.workspace().clone()), + workspace_a ); // No thread saved to the store – cwd is the only path hint. @@ -3815,6 +3631,7 @@ async fn test_activate_archived_thread_cwd_fallback_with_matching_workspace( updated_at: Utc::now(), created_at: None, folder_paths: PathList::new(&[std::path::PathBuf::from("/project-b")]), + main_worktree_paths: PathList::default(), archived: false, }, window, @@ -3824,8 +3641,8 @@ async fn test_activate_archived_thread_cwd_fallback_with_matching_workspace( cx.run_until_parked(); assert_eq!( - multi_workspace.read_with(cx, |mw, _| mw.active_workspace_index()), - 1, + multi_workspace.read_with(cx, |mw, _| mw.workspace().clone()), + workspace_b, "should have activated the workspace matching the cwd" ); } @@ -3850,21 +3667,21 @@ async fn test_activate_archived_thread_no_paths_no_cwd_uses_active_workspace( let (multi_workspace, cx) = cx.add_window_view(|window, cx| MultiWorkspace::test_new(project_a, window, cx)); - multi_workspace.update_in(cx, |mw, window, cx| { - mw.test_add_workspace(project_b, window, cx); - }); - let sidebar = setup_sidebar(&multi_workspace, cx); + let workspace_b = multi_workspace.update_in(cx, |mw, window, cx| { + mw.test_add_workspace(project_b, window, cx) + }); + // Activate workspace B (index 1) to make it the active one. 
multi_workspace.update_in(cx, |mw, window, cx| { - let workspace = mw.workspaces()[1].clone(); + let workspace = mw.workspaces().nth(1).unwrap().clone(); mw.activate(workspace, window, cx); }); cx.run_until_parked(); assert_eq!( - multi_workspace.read_with(cx, |mw, _| mw.active_workspace_index()), - 1 + multi_workspace.read_with(cx, |mw, _| mw.workspace().clone()), + workspace_b ); // No saved thread, no cwd – should fall back to the active workspace. @@ -3877,6 +3694,7 @@ async fn test_activate_archived_thread_no_paths_no_cwd_uses_active_workspace( updated_at: Utc::now(), created_at: None, folder_paths: PathList::default(), + main_worktree_paths: PathList::default(), archived: false, }, window, @@ -3886,8 +3704,8 @@ async fn test_activate_archived_thread_no_paths_no_cwd_uses_active_workspace( cx.run_until_parked(); assert_eq!( - multi_workspace.read_with(cx, |mw, _| mw.active_workspace_index()), - 1, + multi_workspace.read_with(cx, |mw, _| mw.workspace().clone()), + workspace_b, "should have stayed on the active workspace when no path info is available" ); } @@ -3917,7 +3735,7 @@ async fn test_activate_archived_thread_saved_paths_opens_new_workspace(cx: &mut let session_id = acp::SessionId::new(Arc::from("archived-new-ws")); assert_eq!( - multi_workspace.read_with(cx, |mw, _| mw.workspaces().len()), + multi_workspace.read_with(cx, |mw, _| mw.workspaces().count()), 1, "should start with one workspace" ); @@ -3931,6 +3749,7 @@ async fn test_activate_archived_thread_saved_paths_opens_new_workspace(cx: &mut updated_at: Utc::now(), created_at: None, folder_paths: path_list_b, + main_worktree_paths: PathList::default(), archived: false, }, window, @@ -3940,7 +3759,7 @@ async fn test_activate_archived_thread_saved_paths_opens_new_workspace(cx: &mut cx.run_until_parked(); assert_eq!( - multi_workspace.read_with(cx, |mw, _| mw.workspaces().len()), + multi_workspace.read_with(cx, |mw, _| mw.workspaces().count()), 2, "should have opened a second workspace for the archived 
thread's saved paths" ); @@ -3965,6 +3784,10 @@ async fn test_activate_archived_thread_reuses_workspace_in_another_window(cx: &m cx.add_window(|window, cx| MultiWorkspace::test_new(project_b, window, cx)); let multi_workspace_a_entity = multi_workspace_a.root(cx).unwrap(); + let multi_workspace_b_entity = multi_workspace_b.root(cx).unwrap(); + + let cx_b = &mut gpui::VisualTestContext::from_window(multi_workspace_b.into(), cx); + let _sidebar_b = setup_sidebar(&multi_workspace_b_entity, cx_b); let cx_a = &mut gpui::VisualTestContext::from_window(multi_workspace_a.into(), cx); let sidebar = setup_sidebar(&multi_workspace_a_entity, cx_a); @@ -3980,6 +3803,7 @@ async fn test_activate_archived_thread_reuses_workspace_in_another_window(cx: &m updated_at: Utc::now(), created_at: None, folder_paths: PathList::new(&[PathBuf::from("/project-b")]), + main_worktree_paths: PathList::default(), archived: false, }, window, @@ -3990,14 +3814,14 @@ async fn test_activate_archived_thread_reuses_workspace_in_another_window(cx: &m assert_eq!( multi_workspace_a - .read_with(cx_a, |mw, _| mw.workspaces().len()) + .read_with(cx_a, |mw, _| mw.workspaces().count()) .unwrap(), 1, "should not add the other window's workspace into the current window" ); assert_eq!( multi_workspace_b - .read_with(cx_a, |mw, _| mw.workspaces().len()) + .read_with(cx_a, |mw, _| mw.workspaces().count()) .unwrap(), 1, "should reuse the existing workspace in the other window" @@ -4056,6 +3880,7 @@ async fn test_activate_archived_thread_reuses_workspace_in_another_window_with_t updated_at: Utc::now(), created_at: None, folder_paths: PathList::new(&[PathBuf::from("/project-b")]), + main_worktree_paths: PathList::default(), archived: false, }, window, @@ -4066,14 +3891,14 @@ async fn test_activate_archived_thread_reuses_workspace_in_another_window_with_t assert_eq!( multi_workspace_a - .read_with(cx_a, |mw, _| mw.workspaces().len()) + .read_with(cx_a, |mw, _| mw.workspaces().count()) .unwrap(), 1, "should not add the 
other window's workspace into the current window" ); assert_eq!( multi_workspace_b - .read_with(cx_a, |mw, _| mw.workspaces().len()) + .read_with(cx_a, |mw, _| mw.workspaces().count()) .unwrap(), 1, "should reuse the existing workspace in the other window" @@ -4116,6 +3941,10 @@ async fn test_activate_archived_thread_prefers_current_window_for_matching_paths cx.add_window(|window, cx| MultiWorkspace::test_new(project_a, window, cx)); let multi_workspace_a_entity = multi_workspace_a.root(cx).unwrap(); + let multi_workspace_b_entity = multi_workspace_b.root(cx).unwrap(); + + let cx_b = &mut gpui::VisualTestContext::from_window(multi_workspace_b.into(), cx); + let _sidebar_b = setup_sidebar(&multi_workspace_b_entity, cx_b); let cx_a = &mut gpui::VisualTestContext::from_window(multi_workspace_a.into(), cx); let sidebar_a = setup_sidebar(&multi_workspace_a_entity, cx_a); @@ -4131,6 +3960,7 @@ async fn test_activate_archived_thread_prefers_current_window_for_matching_paths updated_at: Utc::now(), created_at: None, folder_paths: PathList::new(&[PathBuf::from("/project-a")]), + main_worktree_paths: PathList::default(), archived: false, }, window, @@ -4152,14 +3982,14 @@ async fn test_activate_archived_thread_prefers_current_window_for_matching_paths }); assert_eq!( multi_workspace_a - .read_with(cx_a, |mw, _| mw.workspaces().len()) + .read_with(cx_a, |mw, _| mw.workspaces().count()) .unwrap(), 1, "current window should continue reusing its existing workspace" ); assert_eq!( multi_workspace_b - .read_with(cx_a, |mw, _| mw.workspaces().len()) + .read_with(cx_a, |mw, _| mw.workspaces().count()) .unwrap(), 1, "other windows should not be activated just because they also match the saved paths" @@ -4190,37 +4020,23 @@ async fn test_archive_thread_uses_next_threads_own_workspace(cx: &mut TestAppCon fs.insert_tree( "/project", serde_json::json!({ - ".git": { - "worktrees": { - "feature-a": { - "commondir": "../../", - "HEAD": "ref: refs/heads/feature-a", - }, - }, - }, - "src": 
{}, - }), - ) - .await; - - fs.insert_tree( - "/wt-feature-a", - serde_json::json!({ - ".git": "gitdir: /project/.git/worktrees/feature-a", + ".git": {}, "src": {}, }), ) .await; - fs.with_git_state(std::path::Path::new("/project/.git"), false, |state| { - state.worktrees.push(git::repository::Worktree { + fs.add_linked_worktree_for_repo( + Path::new("/project/.git"), + false, + git::repository::Worktree { path: std::path::PathBuf::from("/wt-feature-a"), ref_name: Some("refs/heads/feature-a".into()), sha: "aaa".into(), is_main: false, - }); - }) - .unwrap(); + }, + ) + .await; cx.update(|cx| ::set_global(fs.clone(), cx)); @@ -4237,19 +4053,20 @@ async fn test_archive_thread_uses_next_threads_own_workspace(cx: &mut TestAppCon let (multi_workspace, cx) = cx.add_window_view(|window, cx| MultiWorkspace::test_new(main_project.clone(), window, cx)); + let sidebar = setup_sidebar(&multi_workspace, cx); + let worktree_workspace = multi_workspace.update_in(cx, |mw, window, cx| { mw.test_add_workspace(worktree_project.clone(), window, cx) }); // Activate main workspace so the sidebar tracks the main panel. 
multi_workspace.update_in(cx, |mw, window, cx| { - let workspace = mw.workspaces()[0].clone(); + let workspace = mw.workspaces().next().unwrap().clone(); mw.activate(workspace, window, cx); }); - let sidebar = setup_sidebar(&multi_workspace, cx); - - let main_workspace = multi_workspace.read_with(cx, |mw, _| mw.workspaces()[0].clone()); + let main_workspace = + multi_workspace.read_with(cx, |mw, _| mw.workspaces().next().unwrap().clone()); let main_panel = add_agent_panel(&main_workspace, cx); let _worktree_panel = add_agent_panel(&worktree_workspace, cx); @@ -4274,7 +4091,7 @@ async fn test_archive_thread_uses_next_threads_own_workspace(cx: &mut TestAppCon "Thread 2".into(), chrono::TimeZone::with_ymd_and_hms(&Utc, 2024, 1, 2, 0, 0, 0).unwrap(), None, - PathList::new(&[std::path::PathBuf::from("/project")]), + &main_project, cx, ); @@ -4286,7 +4103,7 @@ async fn test_archive_thread_uses_next_threads_own_workspace(cx: &mut TestAppCon "Thread 1".into(), chrono::TimeZone::with_ymd_and_hms(&Utc, 2024, 1, 1, 0, 0, 0).unwrap(), None, - PathList::new(&[std::path::PathBuf::from("/wt-feature-a")]), + &worktree_project, cx, ); @@ -4354,22 +4171,7 @@ async fn test_linked_worktree_threads_not_duplicated_across_groups(cx: &mut Test fs.insert_tree( "/project", serde_json::json!({ - ".git": { - "worktrees": { - "feature-a": { - "commondir": "../../", - "HEAD": "ref: refs/heads/feature-a", - }, - }, - }, - "src": {}, - }), - ) - .await; - fs.insert_tree( - "/wt-feature-a", - serde_json::json!({ - ".git": "gitdir: /project/.git/worktrees/feature-a", + ".git": {}, "src": {}, }), ) @@ -4384,15 +4186,17 @@ async fn test_linked_worktree_threads_not_duplicated_across_groups(cx: &mut Test .await; // Register the linked worktree in the main repo. 
- fs.with_git_state(std::path::Path::new("/project/.git"), false, |state| { - state.worktrees.push(git::repository::Worktree { + fs.add_linked_worktree_for_repo( + Path::new("/project/.git"), + false, + git::repository::Worktree { path: std::path::PathBuf::from("/wt-feature-a"), ref_name: Some("refs/heads/feature-a".into()), sha: "aaa".into(), is_main: false, - }); - }) - .unwrap(); + }, + ) + .await; cx.update(|cx| ::set_global(fs.clone(), cx)); @@ -4409,16 +4213,20 @@ async fn test_linked_worktree_threads_not_duplicated_across_groups(cx: &mut Test .update(cx, |p, cx| p.git_scans_complete(cx)) .await; + let worktree_project = project::Project::test(fs.clone(), ["/wt-feature-a".as_ref()], cx).await; + worktree_project + .update(cx, |p, cx| p.git_scans_complete(cx)) + .await; + let (multi_workspace, cx) = cx.add_window_view(|window, cx| MultiWorkspace::test_new(project_only.clone(), window, cx)); + let sidebar = setup_sidebar(&multi_workspace, cx); multi_workspace.update_in(cx, |mw, window, cx| { mw.test_add_workspace(multi_root.clone(), window, cx); }); - let sidebar = setup_sidebar(&multi_workspace, cx); // Save a thread under the linked worktree path. 
- let wt_paths = PathList::new(&[std::path::PathBuf::from("/wt-feature-a")]); - save_named_thread_metadata("wt-thread", "Worktree Thread", &wt_paths, cx).await; + save_named_thread_metadata("wt-thread", "Worktree Thread", &worktree_project, cx).await; multi_workspace.update_in(cx, |_, _window, cx| cx.notify()); cx.run_until_parked(); @@ -4428,11 +4236,10 @@ async fn test_linked_worktree_threads_not_duplicated_across_groups(cx: &mut Test assert_eq!( visible_entries_as_strings(&sidebar, cx), vec![ - "v [project]", - " [+ New Thread]", - " Worktree Thread {wt-feature-a}", "v [other, project]", " [+ New Thread]", + "v [project]", + " Worktree Thread {wt-feature-a}", ] ); } @@ -4444,8 +4251,6 @@ async fn test_thread_switcher_ordering(cx: &mut TestAppContext) { cx.add_window_view(|window, cx| MultiWorkspace::test_new(project.clone(), window, cx)); let (sidebar, panel) = setup_sidebar_with_agent_panel(&multi_workspace, cx); - let path_list = PathList::new(&[std::path::PathBuf::from("/my-project")]); - let switcher_ids = |sidebar: &Entity, cx: &mut gpui::VisualTestContext| -> Vec { sidebar.read_with(cx, |sidebar, cx| { @@ -4492,7 +4297,7 @@ async fn test_thread_switcher_ordering(cx: &mut TestAppContext) { "Thread C".into(), chrono::TimeZone::with_ymd_and_hms(&Utc, 2024, 1, 1, 0, 0, 0).unwrap(), Some(chrono::TimeZone::with_ymd_and_hms(&Utc, 2024, 1, 1, 0, 0, 0).unwrap()), - path_list.clone(), + &project, cx, ); @@ -4508,7 +4313,7 @@ async fn test_thread_switcher_ordering(cx: &mut TestAppContext) { "Thread B".into(), chrono::TimeZone::with_ymd_and_hms(&Utc, 2024, 1, 2, 0, 0, 0).unwrap(), Some(chrono::TimeZone::with_ymd_and_hms(&Utc, 2024, 1, 2, 0, 0, 0).unwrap()), - path_list.clone(), + &project, cx, ); @@ -4524,7 +4329,7 @@ async fn test_thread_switcher_ordering(cx: &mut TestAppContext) { "Thread A".into(), chrono::TimeZone::with_ymd_and_hms(&Utc, 2024, 1, 3, 0, 0, 0).unwrap(), Some(chrono::TimeZone::with_ymd_and_hms(&Utc, 2024, 1, 3, 0, 0, 0).unwrap()), - 
path_list.clone(), + &project, cx, ); @@ -4533,8 +4338,8 @@ async fn test_thread_switcher_ordering(cx: &mut TestAppContext) { // so all three have last_accessed_at set. // Access order is: A (most recent), B, C (oldest). - // ── 1. Open switcher: threads sorted by last_accessed_at ─────────── - open_and_focus_sidebar(&sidebar, cx); + // ── 1. Open switcher: threads sorted by last_accessed_at ───────────────── + focus_sidebar(&sidebar, cx); sidebar.update_in(cx, |sidebar, window, cx| { sidebar.on_toggle_thread_switcher(&ToggleThreadSwitcher::default(), window, cx); }); @@ -4710,7 +4515,7 @@ async fn test_thread_switcher_ordering(cx: &mut TestAppContext) { "Historical Thread".into(), chrono::TimeZone::with_ymd_and_hms(&Utc, 2024, 6, 1, 0, 0, 0).unwrap(), Some(chrono::TimeZone::with_ymd_and_hms(&Utc, 2024, 6, 1, 0, 0, 0).unwrap()), - path_list.clone(), + &project, cx, ); @@ -4751,7 +4556,7 @@ async fn test_thread_switcher_ordering(cx: &mut TestAppContext) { "Old Historical Thread".into(), chrono::TimeZone::with_ymd_and_hms(&Utc, 2023, 6, 1, 0, 0, 0).unwrap(), Some(chrono::TimeZone::with_ymd_and_hms(&Utc, 2023, 6, 1, 0, 0, 0).unwrap()), - path_list, + &project, cx, ); @@ -4785,17 +4590,15 @@ async fn test_thread_switcher_ordering(cx: &mut TestAppContext) { async fn test_archive_thread_keeps_metadata_but_hides_from_sidebar(cx: &mut TestAppContext) { let project = init_test_project("/my-project", cx).await; let (multi_workspace, cx) = - cx.add_window_view(|window, cx| MultiWorkspace::test_new(project, window, cx)); + cx.add_window_view(|window, cx| MultiWorkspace::test_new(project.clone(), window, cx)); let sidebar = setup_sidebar(&multi_workspace, cx); - let path_list = PathList::new(&[std::path::PathBuf::from("/my-project")]); - save_thread_metadata( acp::SessionId::new(Arc::from("thread-to-archive")), "Thread To Archive".into(), chrono::TimeZone::with_ymd_and_hms(&Utc, 2024, 1, 1, 0, 0, 0).unwrap(), None, - path_list, + &project, cx, ); cx.run_until_parked(); @@ 
-4837,17 +4640,15 @@ async fn test_archive_thread_keeps_metadata_but_hides_from_sidebar(cx: &mut Test async fn test_archived_threads_excluded_from_sidebar_entries(cx: &mut TestAppContext) { let project = init_test_project("/my-project", cx).await; let (multi_workspace, cx) = - cx.add_window_view(|window, cx| MultiWorkspace::test_new(project, window, cx)); + cx.add_window_view(|window, cx| MultiWorkspace::test_new(project.clone(), window, cx)); let sidebar = setup_sidebar(&multi_workspace, cx); - let path_list = PathList::new(&[std::path::PathBuf::from("/my-project")]); - save_thread_metadata( acp::SessionId::new(Arc::from("visible-thread")), "Visible Thread".into(), chrono::TimeZone::with_ymd_and_hms(&Utc, 2024, 1, 2, 0, 0, 0).unwrap(), None, - path_list.clone(), + &project, cx, ); @@ -4857,7 +4658,7 @@ async fn test_archived_threads_excluded_from_sidebar_entries(cx: &mut TestAppCon "Archived Thread".into(), chrono::TimeZone::with_ymd_and_hms(&Utc, 2024, 1, 1, 0, 0, 0).unwrap(), None, - path_list, + &project, cx, ); @@ -4897,12 +4698,376 @@ async fn test_archived_threads_excluded_from_sidebar_entries(cx: &mut TestAppCon }); } +#[gpui::test] +async fn test_linked_worktree_workspace_shows_main_worktree_threads(cx: &mut TestAppContext) { + // When only a linked worktree workspace is open (not the main repo), + // threads saved against the main repo should still appear in the sidebar. + init_test(cx); + let fs = FakeFs::new(cx.executor()); + + // Create the main repo with a linked worktree. 
+ fs.insert_tree( + "/project", + serde_json::json!({ + ".git": { + "worktrees": { + "feature-a": { + "commondir": "../../", + "HEAD": "ref: refs/heads/feature-a", + }, + }, + }, + "src": {}, + }), + ) + .await; + + fs.insert_tree( + "/wt-feature-a", + serde_json::json!({ + ".git": "gitdir: /project/.git/worktrees/feature-a", + "src": {}, + }), + ) + .await; + + fs.add_linked_worktree_for_repo( + std::path::Path::new("/project/.git"), + false, + git::repository::Worktree { + path: std::path::PathBuf::from("/wt-feature-a"), + ref_name: Some("refs/heads/feature-a".into()), + sha: "abc".into(), + is_main: false, + }, + ) + .await; + + cx.update(|cx| ::set_global(fs.clone(), cx)); + + // Only open the linked worktree as a workspace — NOT the main repo. + let worktree_project = project::Project::test(fs.clone(), ["/wt-feature-a".as_ref()], cx).await; + worktree_project + .update(cx, |p, cx| p.git_scans_complete(cx)) + .await; + + let main_project = project::Project::test(fs.clone(), ["/project".as_ref()], cx).await; + main_project + .update(cx, |p, cx| p.git_scans_complete(cx)) + .await; + + let (multi_workspace, cx) = cx.add_window_view(|window, cx| { + MultiWorkspace::test_new(worktree_project.clone(), window, cx) + }); + let sidebar = setup_sidebar(&multi_workspace, cx); + + // Save a thread against the MAIN repo path. + save_named_thread_metadata("main-thread", "Main Repo Thread", &main_project, cx).await; + + // Save a thread against the linked worktree path. + save_named_thread_metadata("wt-thread", "Worktree Thread", &worktree_project, cx).await; + + multi_workspace.update_in(cx, |_, _window, cx| cx.notify()); + cx.run_until_parked(); + + // Both threads should be visible: the worktree thread by direct lookup, + // and the main repo thread because the workspace is a linked worktree + // and we also query the main repo path. 
+ let entries = visible_entries_as_strings(&sidebar, cx); + assert!( + entries.iter().any(|e| e.contains("Main Repo Thread")), + "expected main repo thread to be visible in linked worktree workspace, got: {entries:?}" + ); + assert!( + entries.iter().any(|e| e.contains("Worktree Thread")), + "expected worktree thread to be visible, got: {entries:?}" + ); +} + +async fn init_multi_project_test( + paths: &[&str], + cx: &mut TestAppContext, +) -> (Arc, Entity) { + agent_ui::test_support::init_test(cx); + cx.update(|cx| { + cx.update_flags(false, vec!["agent-v2".into()]); + ThreadStore::init_global(cx); + ThreadMetadataStore::init_global(cx); + language_model::LanguageModelRegistry::test(cx); + prompt_store::init(cx); + }); + let fs = FakeFs::new(cx.executor()); + for path in paths { + fs.insert_tree(path, serde_json::json!({ ".git": {}, "src": {} })) + .await; + } + cx.update(|cx| ::set_global(fs.clone(), cx)); + let project = + project::Project::test(fs.clone() as Arc, [paths[0].as_ref()], cx).await; + (fs, project) +} + +async fn add_test_project( + path: &str, + fs: &Arc, + multi_workspace: &Entity, + cx: &mut gpui::VisualTestContext, +) -> Entity { + let project = project::Project::test(fs.clone() as Arc, [path.as_ref()], cx).await; + let workspace = multi_workspace.update_in(cx, |mw, window, cx| { + mw.test_add_workspace(project, window, cx) + }); + cx.run_until_parked(); + workspace +} + +#[gpui::test] +async fn test_transient_workspace_lifecycle(cx: &mut TestAppContext) { + let (fs, project_a) = + init_multi_project_test(&["/project-a", "/project-b", "/project-c"], cx).await; + let (multi_workspace, cx) = + cx.add_window_view(|window, cx| MultiWorkspace::test_new(project_a, window, cx)); + let _sidebar = setup_sidebar_closed(&multi_workspace, cx); + + // Sidebar starts closed. Initial workspace A is transient. 
+ let workspace_a = multi_workspace.read_with(cx, |mw, _| mw.workspace().clone()); + assert!(!multi_workspace.read_with(cx, |mw, _| mw.sidebar_open())); + assert_eq!( + multi_workspace.read_with(cx, |mw, _| mw.workspaces().count()), + 1 + ); + assert!(multi_workspace.read_with(cx, |mw, _| mw.workspace() == &workspace_a)); + + // Add B — replaces A as the transient workspace. + let workspace_b = add_test_project("/project-b", &fs, &multi_workspace, cx).await; + assert_eq!( + multi_workspace.read_with(cx, |mw, _| mw.workspaces().count()), + 1 + ); + assert!(multi_workspace.read_with(cx, |mw, _| mw.workspace() == &workspace_b)); + + // Add C — replaces B as the transient workspace. + let workspace_c = add_test_project("/project-c", &fs, &multi_workspace, cx).await; + assert_eq!( + multi_workspace.read_with(cx, |mw, _| mw.workspaces().count()), + 1 + ); + assert!(multi_workspace.read_with(cx, |mw, _| mw.workspace() == &workspace_c)); +} + +#[gpui::test] +async fn test_transient_workspace_retained(cx: &mut TestAppContext) { + let (fs, project_a) = init_multi_project_test( + &["/project-a", "/project-b", "/project-c", "/project-d"], + cx, + ) + .await; + let (multi_workspace, cx) = + cx.add_window_view(|window, cx| MultiWorkspace::test_new(project_a, window, cx)); + let _sidebar = setup_sidebar(&multi_workspace, cx); + assert!(multi_workspace.read_with(cx, |mw, _| mw.sidebar_open())); + + // Add B — retained since sidebar is open. + let workspace_a = add_test_project("/project-b", &fs, &multi_workspace, cx).await; + assert_eq!( + multi_workspace.read_with(cx, |mw, _| mw.workspaces().count()), + 2 + ); + + // Switch to A — B survives. (Switching from one internal workspace, to another) + multi_workspace.update_in(cx, |mw, window, cx| mw.activate(workspace_a, window, cx)); + cx.run_until_parked(); + assert_eq!( + multi_workspace.read_with(cx, |mw, _| mw.workspaces().count()), + 2 + ); + + // Close sidebar — both A and B remain retained. 
+ multi_workspace.update_in(cx, |mw, window, cx| mw.close_sidebar(window, cx)); + cx.run_until_parked(); + assert_eq!( + multi_workspace.read_with(cx, |mw, _| mw.workspaces().count()), + 2 + ); + + // Add C — added as new transient workspace. (switching from retained, to transient) + let workspace_c = add_test_project("/project-c", &fs, &multi_workspace, cx).await; + assert_eq!( + multi_workspace.read_with(cx, |mw, _| mw.workspaces().count()), + 3 + ); + assert!(multi_workspace.read_with(cx, |mw, _| mw.workspace() == &workspace_c)); + + // Add D — replaces C as the transient workspace (Have retained and transient workspaces, transient workspace is dropped) + let workspace_d = add_test_project("/project-d", &fs, &multi_workspace, cx).await; + assert_eq!( + multi_workspace.read_with(cx, |mw, _| mw.workspaces().count()), + 3 + ); + assert!(multi_workspace.read_with(cx, |mw, _| mw.workspace() == &workspace_d)); +} + +#[gpui::test] +async fn test_transient_workspace_promotion(cx: &mut TestAppContext) { + let (fs, project_a) = + init_multi_project_test(&["/project-a", "/project-b", "/project-c"], cx).await; + let (multi_workspace, cx) = + cx.add_window_view(|window, cx| MultiWorkspace::test_new(project_a, window, cx)); + setup_sidebar_closed(&multi_workspace, cx); + + // Add B — replaces A as the transient workspace (A is discarded). + let workspace_b = add_test_project("/project-b", &fs, &multi_workspace, cx).await; + assert_eq!( + multi_workspace.read_with(cx, |mw, _| mw.workspaces().count()), + 1 + ); + assert!(multi_workspace.read_with(cx, |mw, _| mw.workspace() == &workspace_b)); + + // Open sidebar — promotes the transient B to retained. 
+ multi_workspace.update_in(cx, |mw, window, cx| { + mw.toggle_sidebar(window, cx); + }); + cx.run_until_parked(); + assert_eq!( + multi_workspace.read_with(cx, |mw, _| mw.workspaces().count()), + 1 + ); + assert!(multi_workspace.read_with(cx, |mw, _| mw.workspaces().any(|w| w == &workspace_b))); + + // Close sidebar — the retained B remains. + multi_workspace.update_in(cx, |mw, window, cx| { + mw.toggle_sidebar(window, cx); + }); + + // Add C — added as new transient workspace. + let workspace_c = add_test_project("/project-c", &fs, &multi_workspace, cx).await; + assert_eq!( + multi_workspace.read_with(cx, |mw, _| mw.workspaces().count()), + 2 + ); + assert!(multi_workspace.read_with(cx, |mw, _| mw.workspace() == &workspace_c)); +} + +#[gpui::test] +async fn test_legacy_thread_with_canonical_path_opens_main_repo_workspace(cx: &mut TestAppContext) { + init_test(cx); + let fs = FakeFs::new(cx.executor()); + + fs.insert_tree( + "/project", + serde_json::json!({ + ".git": { + "worktrees": { + "feature-a": { + "commondir": "../../", + "HEAD": "ref: refs/heads/feature-a", + }, + }, + }, + "src": {}, + }), + ) + .await; + + fs.insert_tree( + "/wt-feature-a", + serde_json::json!({ + ".git": "gitdir: /project/.git/worktrees/feature-a", + "src": {}, + }), + ) + .await; + + fs.add_linked_worktree_for_repo( + Path::new("/project/.git"), + false, + git::repository::Worktree { + path: PathBuf::from("/wt-feature-a"), + ref_name: Some("refs/heads/feature-a".into()), + sha: "abc".into(), + is_main: false, + }, + ) + .await; + + cx.update(|cx| ::set_global(fs.clone(), cx)); + + // Only a linked worktree workspace is open — no workspace for /project. 
+ let worktree_project = project::Project::test(fs.clone(), ["/wt-feature-a".as_ref()], cx).await; + worktree_project + .update(cx, |p, cx| p.git_scans_complete(cx)) + .await; + + let (multi_workspace, cx) = cx.add_window_view(|window, cx| { + MultiWorkspace::test_new(worktree_project.clone(), window, cx) + }); + let sidebar = setup_sidebar(&multi_workspace, cx); + + // Save a legacy thread: folder_paths = main repo, main_worktree_paths = empty. + let legacy_session = acp::SessionId::new(Arc::from("legacy-main-thread")); + cx.update(|_, cx| { + let metadata = ThreadMetadata { + session_id: legacy_session.clone(), + agent_id: agent::ZED_AGENT_ID.clone(), + title: "Legacy Main Thread".into(), + updated_at: chrono::TimeZone::with_ymd_and_hms(&Utc, 2024, 1, 1, 0, 0, 0).unwrap(), + created_at: None, + folder_paths: PathList::new(&[PathBuf::from("/project")]), + main_worktree_paths: PathList::default(), + archived: false, + }; + ThreadMetadataStore::global(cx).update(cx, |store, cx| store.save_manually(metadata, cx)); + }); + cx.run_until_parked(); + + multi_workspace.update_in(cx, |_, _window, cx| cx.notify()); + cx.run_until_parked(); + + // The legacy thread should appear in the sidebar under the project group. + let entries = visible_entries_as_strings(&sidebar, cx); + assert!( + entries.iter().any(|e| e.contains("Legacy Main Thread")), + "legacy thread should be visible: {entries:?}", + ); + + // Verify only 1 workspace before clicking. + assert_eq!( + multi_workspace.read_with(cx, |mw, _| mw.workspaces().count()), + 1, + ); + + // Focus and select the legacy thread, then confirm. 
+ focus_sidebar(&sidebar, cx); + let thread_index = sidebar.read_with(cx, |sidebar, _| { + sidebar + .contents + .entries + .iter() + .position(|e| e.session_id().is_some_and(|id| id == &legacy_session)) + .expect("legacy thread should be in entries") + }); + sidebar.update_in(cx, |sidebar, _window, _cx| { + sidebar.selection = Some(thread_index); + }); + cx.dispatch_action(Confirm); + cx.run_until_parked(); + + let new_workspace = multi_workspace.read_with(cx, |mw, _| mw.workspace().clone()); + let new_path_list = + new_workspace.read_with(cx, |_, cx| workspace_path_list(&new_workspace, cx)); + assert_eq!( + new_path_list, + PathList::new(&[PathBuf::from("/project")]), + "the new workspace should be for the main repo, not the linked worktree", + ); +} + mod property_test { use super::*; - use gpui::EntityId; struct UnopenedWorktree { path: String, + main_workspace_path: String, } struct TestState { @@ -5031,8 +5196,22 @@ mod property_test { } fn save_thread_to_path( + state: &mut TestState, + project: &Entity, + cx: &mut gpui::VisualTestContext, + ) { + let session_id = state.next_thread_id(); + let title: SharedString = format!("Thread {}", session_id).into(); + let updated_at = chrono::TimeZone::with_ymd_and_hms(&chrono::Utc, 2024, 1, 1, 0, 0, 0) + .unwrap() + + chrono::Duration::seconds(state.thread_counter as i64); + save_thread_metadata(session_id, title, updated_at, None, project, cx); + } + + fn save_thread_to_path_with_main( state: &mut TestState, path_list: PathList, + main_worktree_paths: PathList, cx: &mut gpui::VisualTestContext, ) { let session_id = state.next_thread_id(); @@ -5040,7 +5219,21 @@ mod property_test { let updated_at = chrono::TimeZone::with_ymd_and_hms(&chrono::Utc, 2024, 1, 1, 0, 0, 0) .unwrap() + chrono::Duration::seconds(state.thread_counter as i64); - save_thread_metadata(session_id, title, updated_at, None, path_list, cx); + let metadata = ThreadMetadata { + session_id, + agent_id: agent::ZED_AGENT_ID.clone(), + title, + updated_at, 
+ created_at: None, + folder_paths: path_list, + main_worktree_paths, + archived: false, + }; + cx.update(|_, cx| { + ThreadMetadataStore::global(cx) + .update(cx, |store, cx| store.save_manually(metadata, cx)) + }); + cx.run_until_parked(); } async fn perform_operation( @@ -5052,16 +5245,22 @@ mod property_test { ) { match operation { Operation::SaveThread { workspace_index } => { - let workspace = - multi_workspace.read_with(cx, |mw, _| mw.workspaces()[workspace_index].clone()); - let path_list = workspace - .read_with(cx, |workspace, cx| PathList::new(&workspace.root_paths(cx))); - save_thread_to_path(state, path_list, cx); + let project = multi_workspace.read_with(cx, |mw, cx| { + mw.workspaces() + .nth(workspace_index) + .unwrap() + .read(cx) + .project() + .clone() + }); + save_thread_to_path(state, &project, cx); } Operation::SaveWorktreeThread { worktree_index } => { let worktree = &state.unopened_worktrees[worktree_index]; let path_list = PathList::new(&[std::path::PathBuf::from(&worktree.path)]); - save_thread_to_path(state, path_list, cx); + let main_worktree_paths = + PathList::new(&[std::path::PathBuf::from(&worktree.main_workspace_path)]); + save_thread_to_path_with_main(state, path_list, main_worktree_paths, cx); } Operation::DeleteThread { index } => { let session_id = state.remove_thread(index); @@ -5139,7 +5338,7 @@ mod property_test { } Operation::RemoveWorkspace { index } => { let removed = multi_workspace.update_in(cx, |mw, window, cx| { - let workspace = mw.workspaces()[index].clone(); + let workspace = mw.workspaces().nth(index).unwrap().clone(); mw.remove(&workspace, window, cx) }); if removed { @@ -5153,8 +5352,8 @@ mod property_test { } } Operation::SwitchWorkspace { index } => { - let workspace = - multi_workspace.read_with(cx, |mw, _| mw.workspaces()[index].clone()); + let workspace = multi_workspace + .read_with(cx, |mw, _| mw.workspaces().nth(index).unwrap().clone()); multi_workspace.update_in(cx, |mw, window, cx| { 
mw.activate(workspace, window, cx); }); @@ -5191,19 +5390,22 @@ mod property_test { let worktree_pathbuf = std::path::PathBuf::from(&worktree_path); state .fs - .with_git_state(dot_git_path, false, |git_state| { - git_state.worktrees.push(git::repository::Worktree { + .add_linked_worktree_for_repo( + dot_git_path, + false, + git::repository::Worktree { path: worktree_pathbuf, ref_name: Some(format!("refs/heads/{}", worktree_name).into()), sha: "aaa".into(), is_main: false, - }); - }) - .unwrap(); + }, + ) + .await; // Re-scan the main workspace's project so it discovers the new worktree. - let main_workspace = - multi_workspace.read_with(cx, |mw, _| mw.workspaces()[workspace_index].clone()); + let main_workspace = multi_workspace.read_with(cx, |mw, _| { + mw.workspaces().nth(workspace_index).unwrap().clone() + }); let main_project = main_workspace.read_with(cx, |ws, _| ws.project().clone()); main_project .update(cx, |p, cx| p.git_scans_complete(cx)) @@ -5211,6 +5413,7 @@ mod property_test { state.unopened_worktrees.push(UnopenedWorktree { path: worktree_path, + main_workspace_path: main_path.clone(), }); } } @@ -5224,7 +5427,7 @@ mod property_test { .entries .iter() .filter_map(|entry| match entry { - ListEntry::ProjectHeader { path_list, .. } => Some(path_list.clone()), + ListEntry::ProjectHeader { key, .. } => Some(key.path_list().clone()), _ => None, }) .collect(); @@ -5250,31 +5453,32 @@ mod property_test { anyhow::bail!("sidebar should still have an associated multi-workspace"); }; - let workspaces = multi_workspace.read(cx).workspaces().to_vec(); + let mw = multi_workspace.read(cx); - // Workspaces with no root paths are not shown because the - // sidebar skips empty path lists. All other workspaces should - // appear — either via a Thread entry or a NewThread entry for - // threadless workspaces. 
- let expected_workspaces: HashSet = workspaces - .iter() - .filter(|ws| !workspace_path_list(ws, cx).paths().is_empty()) - .map(|ws| ws.entity_id()) + // Every project group key in the multi-workspace that has a + // non-empty path list should appear as a ProjectHeader in the + // sidebar. + let expected_keys: HashSet<&project::ProjectGroupKey> = mw + .project_group_keys() + .filter(|k| !k.path_list().paths().is_empty()) .collect(); - let sidebar_workspaces: HashSet = sidebar + let sidebar_keys: HashSet<&project::ProjectGroupKey> = sidebar .contents .entries .iter() - .filter_map(|entry| entry.workspace().map(|ws| ws.entity_id())) + .filter_map(|entry| match entry { + ListEntry::ProjectHeader { key, .. } => Some(key), + _ => None, + }) .collect(); - let missing = &expected_workspaces - &sidebar_workspaces; - let stray = &sidebar_workspaces - &expected_workspaces; + let missing = &expected_keys - &sidebar_keys; + let stray = &sidebar_keys - &expected_keys; anyhow::ensure!( missing.is_empty() && stray.is_empty(), - "sidebar workspaces don't match multi-workspace.\n\ + "sidebar project groups don't match multi-workspace.\n\ Only in multi-workspace (missing): {:?}\n\ Only in sidebar (stray): {:?}", missing, @@ -5288,7 +5492,11 @@ mod property_test { let Some(multi_workspace) = sidebar.multi_workspace.upgrade() else { anyhow::bail!("sidebar should still have an associated multi-workspace"); }; - let workspaces = multi_workspace.read(cx).workspaces().to_vec(); + let workspaces = multi_workspace + .read(cx) + .workspaces() + .cloned() + .collect::>(); let thread_store = ThreadMetadataStore::global(cx); let sidebar_thread_ids: HashSet = sidebar @@ -5299,23 +5507,82 @@ mod property_test { .collect(); let mut metadata_thread_ids: HashSet = HashSet::default(); + + // Query using the same approach as the sidebar: iterate project + // group keys, then do main + legacy queries per group. 
+ let mw = multi_workspace.read(cx); + let mut workspaces_by_group: HashMap>> = + HashMap::default(); for workspace in &workspaces { - let path_list = workspace_path_list(workspace, cx); + let key = workspace.read(cx).project_group_key(cx); + workspaces_by_group + .entry(key) + .or_default() + .push(workspace.clone()); + } + + for group_key in mw.project_group_keys() { + let path_list = group_key.path_list().clone(); if path_list.paths().is_empty() { continue; } + + let group_workspaces = workspaces_by_group + .get(group_key) + .map(|ws| ws.as_slice()) + .unwrap_or_default(); + + // Main code path queries (run for all groups, even without workspaces). + for metadata in thread_store + .read(cx) + .entries_for_main_worktree_path(&path_list) + { + metadata_thread_ids.insert(metadata.session_id.clone()); + } for metadata in thread_store.read(cx).entries_for_path(&path_list) { metadata_thread_ids.insert(metadata.session_id.clone()); } - for snapshot in root_repository_snapshots(workspace, cx) { - for linked_worktree in snapshot.linked_worktrees() { - let worktree_path_list = - PathList::new(std::slice::from_ref(&linked_worktree.path)); - for metadata in thread_store.read(cx).entries_for_path(&worktree_path_list) { + + // Legacy: per-workspace queries for different root paths. 
+ let covered_paths: HashSet = group_workspaces + .iter() + .flat_map(|ws| { + ws.read(cx) + .root_paths(cx) + .into_iter() + .map(|p| p.to_path_buf()) + }) + .collect(); + + for workspace in group_workspaces { + let ws_path_list = workspace_path_list(workspace, cx); + if ws_path_list != path_list { + for metadata in thread_store.read(cx).entries_for_path(&ws_path_list) { metadata_thread_ids.insert(metadata.session_id.clone()); } } } + + for workspace in group_workspaces { + for snapshot in root_repository_snapshots(workspace, cx) { + let repo_path_list = + PathList::new(&[snapshot.original_repo_abs_path.to_path_buf()]); + if repo_path_list != path_list { + continue; + } + for linked_worktree in snapshot.linked_worktrees() { + if covered_paths.contains(&*linked_worktree.path) { + continue; + } + let worktree_path_list = + PathList::new(std::slice::from_ref(&linked_worktree.path)); + for metadata in thread_store.read(cx).entries_for_path(&worktree_path_list) + { + metadata_thread_ids.insert(metadata.session_id.clone()); + } + } + } + } } anyhow::ensure!( diff --git a/crates/task/src/task.rs b/crates/task/src/task.rs index ba5f4ae4fed9e676add2eafc8dc14f47cb2200ed..5126d5e89f723f0a9612c2033a789c569111b20a 100644 --- a/crates/task/src/task.rs +++ b/crates/task/src/task.rs @@ -23,8 +23,8 @@ pub use debug_format::{ Request, TcpArgumentsTemplate, ZedDebugConfig, }; pub use task_template::{ - DebugArgsRequest, HideStrategy, RevealStrategy, SaveStrategy, TaskTemplate, TaskTemplates, - substitute_variables_in_map, substitute_variables_in_str, + DebugArgsRequest, HideStrategy, RevealStrategy, SaveStrategy, TaskHook, TaskTemplate, + TaskTemplates, substitute_variables_in_map, substitute_variables_in_str, }; pub use util::shell::{Shell, ShellKind}; pub use util::shell_builder::ShellBuilder; @@ -181,6 +181,10 @@ pub enum VariableName { /// Open a Picker to select a process ID to use in place /// Can only be used to debug configurations PickProcessId, + /// An absolute path of 
the main (original) git worktree for the current repository. + /// For normal checkouts, this equals the worktree root. For linked worktrees, + /// this is the original repo's working directory. + MainGitWorktree, /// Custom variable, provided by the plugin or other external source. /// Will be printed with `CUSTOM_` prefix to avoid potential conflicts with other variables. Custom(Cow<'static, str>), @@ -216,6 +220,7 @@ impl FromStr for VariableName { "LANGUAGE" => Self::Language, "ROW" => Self::Row, "COLUMN" => Self::Column, + "MAIN_GIT_WORKTREE" => Self::MainGitWorktree, _ => { if let Some(custom_name) = without_prefix.strip_prefix(ZED_CUSTOM_VARIABLE_NAME_PREFIX) @@ -251,6 +256,7 @@ impl std::fmt::Display for VariableName { Self::Language => write!(f, "{ZED_VARIABLE_NAME_PREFIX}LANGUAGE"), Self::RunnableSymbol => write!(f, "{ZED_VARIABLE_NAME_PREFIX}RUNNABLE_SYMBOL"), Self::PickProcessId => write!(f, "{ZED_VARIABLE_NAME_PREFIX}PICK_PID"), + Self::MainGitWorktree => write!(f, "{ZED_VARIABLE_NAME_PREFIX}MAIN_GIT_WORKTREE"), Self::Custom(s) => write!( f, "{ZED_VARIABLE_NAME_PREFIX}{ZED_CUSTOM_VARIABLE_NAME_PREFIX}{s}" diff --git a/crates/task/src/task_template.rs b/crates/task/src/task_template.rs index cee6024ca62fb1ed74489f55ae99f6334db3d0f0..25fde261f106d57eef94c4d2ef7cad57b3a7ecd0 100644 --- a/crates/task/src/task_template.rs +++ b/crates/task/src/task_template.rs @@ -75,6 +75,9 @@ pub struct TaskTemplate { /// Which edited buffers to save before running the task. #[serde(default)] pub save: SaveStrategy, + /// Hooks that this task runs when emitted. + #[serde(default)] + pub hooks: HashSet, } #[derive(Deserialize, Eq, PartialEq, Clone, Debug)] @@ -86,6 +89,14 @@ pub enum DebugArgsRequest { Attach(AttachRequest), } +/// What to do with the terminal pane and tab, after the command was started. 
+#[derive(Clone, Copy, Debug, PartialEq, Hash, Eq, Serialize, Deserialize, JsonSchema)] +#[serde(rename_all = "snake_case")] +pub enum TaskHook { + #[serde(alias = "create_git_worktree")] + CreateWorktree, +} + /// What to do with the terminal pane and tab, after the command was started. #[derive(Default, Clone, Copy, Debug, PartialEq, Eq, Serialize, Deserialize, JsonSchema)] #[serde(rename_all = "snake_case")] @@ -116,11 +127,11 @@ pub enum HideStrategy { #[derive(Default, Clone, Copy, Debug, PartialEq, Eq, Serialize, Deserialize, JsonSchema)] #[serde(rename_all = "snake_case")] pub enum SaveStrategy { - #[default] /// Save all edited buffers. All, /// Save the current buffer. Current, + #[default] /// Don't save any buffers. None, } diff --git a/crates/tasks_ui/src/tasks_ui.rs b/crates/tasks_ui/src/tasks_ui.rs index da351ad410d078e79aa4c3038fcf88184bc648fa..ca8ebb5248e4e6d77a05efab8d43dbfbd8d02eca 100644 --- a/crates/tasks_ui/src/tasks_ui.rs +++ b/crates/tasks_ui/src/tasks_ui.rs @@ -321,13 +321,11 @@ pub fn task_contexts( }) .unwrap_or_default(); - let latest_selection = active_editor.as_ref().map(|active_editor| { - active_editor - .read(cx) - .selections - .newest_anchor() - .head() - .text_anchor + let latest_selection = active_editor.as_ref().and_then(|active_editor| { + let snapshot = active_editor.read(cx).buffer().read(cx).snapshot(cx); + snapshot + .anchor_to_buffer_anchor(active_editor.read(cx).selections.newest_anchor().head()) + .map(|(anchor, _)| anchor) }); let mut worktree_abs_paths = workspace @@ -436,7 +434,9 @@ mod tests { ) .await; let project = Project::test(fs, [path!("/dir").as_ref()], cx).await; - let worktree_store = project.read_with(cx, |project, _| project.worktree_store()); + let (worktree_store, git_store) = project.read_with(cx, |project, _| { + (project.worktree_store(), project.git_store().clone()) + }); let rust_language = Arc::new( Language::new( LanguageConfig { @@ -453,6 +453,7 @@ mod tests { .unwrap() 
.with_context_provider(Some(Arc::new(BasicContextProvider::new( worktree_store.clone(), + git_store.clone(), )))), ); @@ -476,6 +477,7 @@ mod tests { .unwrap() .with_context_provider(Some(Arc::new(BasicContextProvider::new( worktree_store.clone(), + git_store.clone(), )))), ); diff --git a/crates/terminal_view/src/terminal_view.rs b/crates/terminal_view/src/terminal_view.rs index 0c9bbcbec32dcd0fbb8240d524b83f461ac778c3..acccd6129f75ee2f5213fa359203220a7fee08c0 100644 --- a/crates/terminal_view/src/terminal_view.rs +++ b/crates/terminal_view/src/terminal_view.rs @@ -850,6 +850,7 @@ impl TerminalView { fn send_text(&mut self, text: &SendText, _: &mut Window, cx: &mut Context) { self.clear_bell(cx); + self.blink_manager.update(cx, BlinkManager::pause_blinking); self.terminal.update(cx, |term, _| { term.input(text.0.to_string().into_bytes()); }); @@ -858,6 +859,7 @@ impl TerminalView { fn send_keystroke(&mut self, text: &SendKeystroke, _: &mut Window, cx: &mut Context) { if let Some(keystroke) = Keystroke::parse(&text.0).log_err() { self.clear_bell(cx); + self.blink_manager.update(cx, BlinkManager::pause_blinking); self.process_keystroke(&keystroke, cx); } } @@ -1820,6 +1822,7 @@ impl SearchableItem for TerminalView { regex: true, replacement: false, selection: false, + select_all: false, find_in_results: false, } } diff --git a/crates/text/src/anchor.rs b/crates/text/src/anchor.rs index 5c4cce0f11d7db7b7593631e796c0f5e3d50adab..4dbe0e377afb86d176e8cd336e186d209a9d3c78 100644 --- a/crates/text/src/anchor.rs +++ b/crates/text/src/anchor.rs @@ -24,7 +24,7 @@ pub struct Anchor { /// Whether this anchor stays attached to the character *before* or *after* /// the offset. 
pub bias: Bias, - pub buffer_id: Option, + pub buffer_id: BufferId, } impl Debug for Anchor { @@ -46,28 +46,7 @@ impl Debug for Anchor { } impl Anchor { - pub const MIN: Self = Self { - timestamp_replica_id: clock::Lamport::MIN.replica_id, - timestamp_value: clock::Lamport::MIN.value, - offset: u32::MIN, - bias: Bias::Left, - buffer_id: None, - }; - - pub const MAX: Self = Self { - timestamp_replica_id: clock::Lamport::MAX.replica_id, - timestamp_value: clock::Lamport::MAX.value, - offset: u32::MAX, - bias: Bias::Right, - buffer_id: None, - }; - - pub fn new( - timestamp: clock::Lamport, - offset: u32, - bias: Bias, - buffer_id: Option, - ) -> Self { + pub fn new(timestamp: clock::Lamport, offset: u32, bias: Bias, buffer_id: BufferId) -> Self { Self { timestamp_replica_id: timestamp.replica_id, timestamp_value: timestamp.value, @@ -83,7 +62,7 @@ impl Anchor { timestamp_value: clock::Lamport::MIN.value, offset: u32::MIN, bias: Bias::Left, - buffer_id: Some(buffer_id), + buffer_id, } } @@ -93,7 +72,7 @@ impl Anchor { timestamp_value: clock::Lamport::MAX.value, offset: u32::MAX, bias: Bias::Right, - buffer_id: Some(buffer_id), + buffer_id, } } @@ -171,7 +150,7 @@ impl Anchor { pub fn is_valid(&self, buffer: &BufferSnapshot) -> bool { if self.is_min() || self.is_max() { true - } else if self.buffer_id.is_none_or(|id| id != buffer.remote_id) { + } else if self.buffer_id != buffer.remote_id { false } else { let Some(fragment_id) = buffer.try_fragment_id_for_anchor(self) else { @@ -207,6 +186,18 @@ impl Anchor { value: self.timestamp_value, } } + + pub fn opaque_id(&self) -> [u8; 20] { + let mut bytes = [0u8; 20]; + let buffer_id: u64 = self.buffer_id.into(); + bytes[0..8].copy_from_slice(&buffer_id.to_le_bytes()); + bytes[8..12].copy_from_slice(&self.offset.to_le_bytes()); + bytes[12..16].copy_from_slice(&self.timestamp_value.to_le_bytes()); + let replica_id = self.timestamp_replica_id.as_u16(); + bytes[16..18].copy_from_slice(&replica_id.to_le_bytes()); + bytes[18] = 
self.bias as u8; + bytes + } } pub trait OffsetRangeExt { @@ -237,6 +228,7 @@ where pub trait AnchorRangeExt { fn cmp(&self, b: &Range, buffer: &BufferSnapshot) -> Ordering; fn overlaps(&self, b: &Range, buffer: &BufferSnapshot) -> bool; + fn contains_anchor(&self, b: Anchor, buffer: &BufferSnapshot) -> bool; } impl AnchorRangeExt for Range { @@ -250,4 +242,8 @@ impl AnchorRangeExt for Range { fn overlaps(&self, other: &Range, buffer: &BufferSnapshot) -> bool { self.start.cmp(&other.end, buffer).is_lt() && other.start.cmp(&self.end, buffer).is_lt() } + + fn contains_anchor(&self, other: Anchor, buffer: &BufferSnapshot) -> bool { + self.start.cmp(&other, buffer).is_le() && self.end.cmp(&other, buffer).is_ge() + } } diff --git a/crates/text/src/patch.rs b/crates/text/src/patch.rs index eff3d0af110763074d7ca9fdc7842d45eece03c1..376d284473d09df16b93a609c8d49c443aa8a4ab 100644 --- a/crates/text/src/patch.rs +++ b/crates/text/src/patch.rs @@ -56,7 +56,10 @@ where if edit.is_empty() { return; } + self.push_maybe_empty(edit); + } + pub fn push_maybe_empty(&mut self, edit: Edit) { if let Some(last) = self.0.last_mut() { if last.old.end >= edit.old.start { last.old.end = edit.old.end; diff --git a/crates/text/src/text.rs b/crates/text/src/text.rs index b8f2ce6ce9b66040b4e633d28bfb42e1791a38ca..026f1272790740c9c2277004e8e96800d87bab15 100644 --- a/crates/text/src/text.rs +++ b/crates/text/src/text.rs @@ -2377,7 +2377,7 @@ impl BufferSnapshot { pub fn summaries_for_anchors<'a, D, A>(&'a self, anchors: A) -> impl 'a + Iterator where D: 'a + TextDimension, - A: 'a + IntoIterator, + A: 'a + IntoIterator, { let anchors = anchors.into_iter(); self.summaries_for_anchors_with_payload::(anchors.map(|a| (a, ()))) @@ -2390,7 +2390,7 @@ impl BufferSnapshot { ) -> impl 'a + Iterator where D: 'a + TextDimension, - A: 'a + IntoIterator, + A: 'a + IntoIterator, { let anchors = anchors.into_iter(); let mut fragment_cursor = self @@ -2406,7 +2406,7 @@ impl BufferSnapshot { return 
(D::from_text_summary(&self.visible_text.summary()), payload); } - let Some(insertion) = self.try_find_fragment(anchor) else { + let Some(insertion) = self.try_find_fragment(&anchor) else { panic!( "invalid insertion for buffer {}@{:?} with anchor {:?}", self.remote_id(), @@ -2457,7 +2457,7 @@ impl BufferSnapshot { } else if anchor.is_max() { self.visible_text.len() } else { - debug_assert_eq!(anchor.buffer_id, Some(self.remote_id)); + debug_assert_eq!(anchor.buffer_id, self.remote_id); debug_assert!( self.version.observed(anchor.timestamp()), "Anchor timestamp {:?} not observed by buffer {:?}", @@ -2489,7 +2489,7 @@ impl BufferSnapshot { #[cold] fn panic_bad_anchor(&self, anchor: &Anchor) -> ! { - if anchor.buffer_id.is_some_and(|id| id != self.remote_id) { + if anchor.buffer_id != self.remote_id { panic!( "invalid anchor - buffer id does not match: anchor {anchor:?}; buffer id: {}, version: {:?}", self.remote_id, self.version @@ -2553,12 +2553,12 @@ impl BufferSnapshot { } /// Returns an anchor range for the given input position range that is anchored to the text in the range. - pub fn anchor_range_around(&self, position: Range) -> Range { + pub fn anchor_range_inside(&self, position: Range) -> Range { self.anchor_after(position.start)..self.anchor_before(position.end) } /// Returns an anchor range for the given input position range that is anchored to the text before and after. 
- pub fn anchor_range_between(&self, position: Range) -> Range { + pub fn anchor_range_outside(&self, position: Range) -> Range { self.anchor_before(position.start)..self.anchor_after(position.end) } @@ -2608,7 +2608,7 @@ impl BufferSnapshot { fragment.timestamp, fragment.insertion_offset + overshoot as u32, bias, - Some(self.remote_id), + self.remote_id, ) } } @@ -2616,8 +2616,7 @@ impl BufferSnapshot { pub fn can_resolve(&self, anchor: &Anchor) -> bool { anchor.is_min() || anchor.is_max() - || (Some(self.remote_id) == anchor.buffer_id - && self.version.observed(anchor.timestamp())) + || (self.remote_id == anchor.buffer_id && self.version.observed(anchor.timestamp())) } pub fn clip_offset(&self, offset: usize, bias: Bias) -> usize { @@ -2643,7 +2642,10 @@ impl BufferSnapshot { where D: TextDimension + Ord, { - self.edits_since_in_range(since, Anchor::MIN..Anchor::MAX) + self.edits_since_in_range( + since, + Anchor::min_for_buffer(self.remote_id)..Anchor::max_for_buffer(self.remote_id), + ) } pub fn anchored_edits_since<'a, D>( @@ -2653,7 +2655,10 @@ impl BufferSnapshot { where D: TextDimension + Ord, { - self.anchored_edits_since_in_range(since, Anchor::MIN..Anchor::MAX) + self.anchored_edits_since_in_range( + since, + Anchor::min_for_buffer(self.remote_id)..Anchor::max_for_buffer(self.remote_id), + ) } pub fn edits_since_in_range<'a, D>( @@ -2916,13 +2921,13 @@ impl bool> Iterator for Ed fragment.timestamp, fragment.insertion_offset, Bias::Right, - Some(self.buffer_id), + self.buffer_id, ); let end_anchor = Anchor::new( fragment.timestamp, fragment.insertion_offset + fragment.len, Bias::Left, - Some(self.buffer_id), + self.buffer_id, ); if !fragment.was_visible(self.since, self.undos) && fragment.visible { diff --git a/crates/theme/src/fallback_themes.rs b/crates/theme/src/fallback_themes.rs index ba7f600fb05cc160f8d2668cf549853c8ae39ebe..a739df3213d297ce8230cfb62a08c91928bd62df 100644 --- a/crates/theme/src/fallback_themes.rs +++ 
b/crates/theme/src/fallback_themes.rs @@ -357,7 +357,7 @@ pub(crate) fn zed_default_dark() -> Theme { ("number".into(), orange.into()), ("operator".into(), HighlightStyle::default()), ("predictive".into(), HighlightStyle::default()), - ("preproc".into(), HighlightStyle::default()), + ("preproc".into(), purple.into()), ("primary".into(), HighlightStyle::default()), ("property".into(), red.into()), ("punctuation".into(), HighlightStyle::default()), @@ -377,6 +377,8 @@ pub(crate) fn zed_default_dark() -> Theme { ("variable".into(), HighlightStyle::default()), ("variable.special".into(), red.into()), ("variant".into(), HighlightStyle::default()), + ("diff.plus".into(), green.into()), + ("diff.minus".into(), red.into()), ])), }, } diff --git a/crates/title_bar/src/title_bar.rs b/crates/title_bar/src/title_bar.rs index 440249907adb6d29602ad8e950d0fd26a2d1c31d..dfcd933dc20df9a6f6643402719f2ec1143cc7fe 100644 --- a/crates/title_bar/src/title_bar.rs +++ b/crates/title_bar/src/title_bar.rs @@ -740,7 +740,6 @@ impl TitleBar { .map(|mw| { mw.read(cx) .workspaces() - .iter() .filter_map(|ws| ws.read(cx).database_id()) .collect() }) @@ -803,7 +802,6 @@ impl TitleBar { .map(|mw| { mw.read(cx) .workspaces() - .iter() .filter_map(|ws| ws.read(cx).database_id()) .collect() }) diff --git a/crates/toolchain_selector/src/active_toolchain.rs b/crates/toolchain_selector/src/active_toolchain.rs index e3766e73bbc29d9548f785018e9f4aa40ab968a1..a9218564b5567d86f097781b224ac0658a0d5221 100644 --- a/crates/toolchain_selector/src/active_toolchain.rs +++ b/crates/toolchain_selector/src/active_toolchain.rs @@ -117,7 +117,7 @@ impl ActiveToolchain { cx: &mut Context, ) { let editor = editor.read(cx); - if let Some((_, buffer, _)) = editor.active_excerpt(cx) + if let Some(buffer) = editor.active_buffer(cx) && let Some(worktree_id) = buffer.read(cx).file().map(|file| file.worktree_id(cx)) { let subscription = cx.subscribe_in( diff --git a/crates/toolchain_selector/src/toolchain_selector.rs 
b/crates/toolchain_selector/src/toolchain_selector.rs index 7447975aa835c7a4c73068d20b55619f7db5231c..010003cd572f85b1aa8e6d31b0fc0a511f2ebd7f 100644 --- a/crates/toolchain_selector/src/toolchain_selector.rs +++ b/crates/toolchain_selector/src/toolchain_selector.rs @@ -584,11 +584,11 @@ impl ToolchainSelector { window: &mut Window, cx: &mut Context, ) -> Option<()> { - let (_, buffer, _) = workspace + let buffer = workspace .active_item(cx)? .act_as::(cx)? .read(cx) - .active_excerpt(cx)?; + .active_buffer(cx)?; let project = workspace.project().clone(); let language_name = buffer.read(cx).language()?.name(); diff --git a/crates/ui/src/components.rs b/crates/ui/src/components.rs index 68b1ff9beb7a8918ee3f5e1857e3cc68e15a3fc1..367d80d79c9af8722091e36c8e04bafb7ef0d8b5 100644 --- a/crates/ui/src/components.rs +++ b/crates/ui/src/components.rs @@ -29,6 +29,7 @@ mod notification; mod popover; mod popover_menu; mod progress; +mod redistributable_columns; mod right_click_menu; mod scrollbar; mod stack; @@ -73,6 +74,7 @@ pub use notification::*; pub use popover::*; pub use popover_menu::*; pub use progress::*; +pub use redistributable_columns::*; pub use right_click_menu::*; pub use scrollbar::*; pub use stack::*; diff --git a/crates/ui/src/components/ai/thread_item.rs b/crates/ui/src/components/ai/thread_item.rs index d6b5f56e0abb33521ae69acc0b61b36b015cf987..7658946b6395d6314d90db52716020a922c85ccc 100644 --- a/crates/ui/src/components/ai/thread_item.rs +++ b/crates/ui/src/components/ai/thread_item.rs @@ -1,7 +1,4 @@ -use crate::{ - CommonAnimationExt, DecoratedIcon, DiffStat, GradientFade, HighlightedLabel, IconDecoration, - IconDecorationKind, Tooltip, prelude::*, -}; +use crate::{CommonAnimationExt, DiffStat, GradientFade, HighlightedLabel, Tooltip, prelude::*}; use gpui::{ Animation, AnimationExt, AnyView, ClickEvent, Hsla, MouseButton, SharedString, @@ -218,7 +215,7 @@ impl RenderOnce for ThreadItem { let color = cx.theme().colors(); let sidebar_base_bg = color 
.title_bar_background - .blend(color.panel_background.opacity(0.32)); + .blend(color.panel_background.opacity(0.25)); let raw_bg = self.base_bg.unwrap_or(sidebar_base_bg); let apparent_bg = color.background.blend(raw_bg); @@ -266,31 +263,31 @@ impl RenderOnce for ThreadItem { Icon::new(self.icon).color(icon_color).size(IconSize::Small) }; - let decoration = |icon: IconDecorationKind, color: Hsla| { - IconDecoration::new(icon, base_bg, cx) - .color(color) - .position(gpui::Point { - x: px(-2.), - y: px(-2.), - }) - }; - - let (decoration, icon_tooltip) = if self.status == AgentThreadStatus::Error { + let (status_icon, icon_tooltip) = if self.status == AgentThreadStatus::Error { ( - Some(decoration(IconDecorationKind::X, cx.theme().status().error)), + Some( + Icon::new(IconName::Close) + .size(IconSize::Small) + .color(Color::Error), + ), Some("Thread has an Error"), ) } else if self.status == AgentThreadStatus::WaitingForConfirmation { ( - Some(decoration( - IconDecorationKind::Triangle, - cx.theme().status().warning, - )), + Some( + Icon::new(IconName::Warning) + .size(IconSize::XSmall) + .color(Color::Warning), + ), Some("Thread is Waiting for Confirmation"), ) } else if self.notified { ( - Some(decoration(IconDecorationKind::Dot, color.text_accent)), + Some( + Icon::new(IconName::Circle) + .size(IconSize::Small) + .color(Color::Accent), + ), Some("Thread's Generation is Complete"), ) } else { @@ -306,9 +303,9 @@ impl RenderOnce for ThreadItem { .with_rotate_animation(2), ) .into_any_element() - } else if let Some(decoration) = decoration { + } else if let Some(status_icon) = status_icon { icon_container() - .child(DecoratedIcon::new(agent_icon, Some(decoration))) + .child(status_icon) .when_some(icon_tooltip, |icon, tooltip| { icon.tooltip(Tooltip::text(tooltip)) }) @@ -551,12 +548,17 @@ impl Component for ThreadItem { } fn preview(_window: &mut Window, cx: &mut App) -> Option { + let color = cx.theme().colors(); + let bg = color + .title_bar_background + 
.blend(color.panel_background.opacity(0.25)); + let container = || { v_flex() .w_72() .border_1() - .border_color(cx.theme().colors().border_variant) - .bg(cx.theme().colors().panel_background) + .border_color(color.border_variant) + .bg(bg) }; let thread_item_examples = vec![ @@ -570,16 +572,6 @@ impl Component for ThreadItem { ) .into_any_element(), ), - single_example( - "Timestamp Only (hours)", - container() - .child( - ThreadItem::new("ti-1b", "Thread with just a timestamp") - .icon(IconName::AiClaude) - .timestamp("3h"), - ) - .into_any_element(), - ), single_example( "Notified (weeks)", container() diff --git a/crates/ui/src/components/data_table.rs b/crates/ui/src/components/data_table.rs index 2012defc47d9cccea87849fa41470ad1183b552f..e5a14a3ddabc0d918bfe6d6bcb077e32adeb6eb4 100644 --- a/crates/ui/src/components/data_table.rs +++ b/crates/ui/src/components/data_table.rs @@ -1,19 +1,19 @@ use std::{ops::Range, rc::Rc}; use gpui::{ - AbsoluteLength, AppContext as _, DefiniteLength, DragMoveEvent, Entity, EntityId, FocusHandle, - Length, ListHorizontalSizingBehavior, ListSizingBehavior, ListState, Point, Stateful, - UniformListScrollHandle, WeakEntity, list, transparent_black, uniform_list, + DefiniteLength, Entity, EntityId, FocusHandle, Length, ListHorizontalSizingBehavior, + ListSizingBehavior, ListState, Point, Stateful, UniformListScrollHandle, WeakEntity, list, + transparent_black, uniform_list, }; -use itertools::intersperse_with; use crate::{ ActiveTheme as _, AnyElement, App, Button, ButtonCommon as _, ButtonStyle, Color, Component, - ComponentScope, Context, Div, ElementId, FixedWidth as _, FluentBuilder as _, Indicator, - InteractiveElement, IntoElement, ParentElement, Pixels, RegisterComponent, RenderOnce, - ScrollAxes, ScrollableHandle, Scrollbars, SharedString, StatefulInteractiveElement, Styled, - StyledExt as _, StyledTypography, Window, WithScrollbar, div, example_group_with_title, h_flex, - px, single_example, + ComponentScope, Context, 
Div, ElementId, FixedWidth as _, FluentBuilder as _, HeaderResizeInfo, + Indicator, InteractiveElement, IntoElement, ParentElement, Pixels, RedistributableColumnsState, + RegisterComponent, RenderOnce, ScrollAxes, ScrollableHandle, Scrollbars, SharedString, + StatefulInteractiveElement, Styled, StyledExt as _, StyledTypography, Window, WithScrollbar, + bind_redistributable_columns, div, example_group_with_title, h_flex, px, + render_redistributable_columns_resize_handles, single_example, table_row::{IntoTableRow as _, TableRow}, v_flex, }; @@ -22,16 +22,10 @@ pub mod table_row; #[cfg(test)] mod tests; -const RESIZE_COLUMN_WIDTH: f32 = 8.0; -const RESIZE_DIVIDER_WIDTH: f32 = 1.0; - /// Represents an unchecked table row, which is a vector of elements. /// Will be converted into `TableRow` internally pub type UncheckedTableRow = Vec; -#[derive(Debug)] -pub(crate) struct DraggedColumn(pub(crate) usize); - struct UniformListData { render_list_of_rows_fn: Box, &mut Window, &mut App) -> Vec>>, @@ -113,124 +107,6 @@ impl TableInteractionState { } } -/// Renders invisible resize handles overlaid on top of table content. -/// -/// - Spacer: invisible element that matches the width of table column content -/// - Divider: contains the actual resize handle that users can drag to resize columns -/// -/// Structure: [spacer] [divider] [spacer] [divider] [spacer] -/// -/// Business logic: -/// 1. Creates spacers matching each column width -/// 2. Intersperses (inserts) resize handles between spacers (interactive only for resizable columns) -/// 3. Each handle supports hover highlighting, double-click to reset, and drag to resize -/// 4. 
Returns an absolute-positioned overlay that sits on top of table content -fn render_resize_handles( - column_widths: &TableRow, - resizable_columns: &TableRow, - initial_sizes: &TableRow, - columns: Option>, - window: &mut Window, - cx: &mut App, -) -> AnyElement { - let spacers = column_widths - .as_slice() - .iter() - .map(|width| base_cell_style(Some(*width)).into_any_element()); - - let mut column_ix = 0; - let resizable_columns_shared = Rc::new(resizable_columns.clone()); - let initial_sizes_shared = Rc::new(initial_sizes.clone()); - let mut resizable_columns_iter = resizable_columns.as_slice().iter(); - - let dividers = intersperse_with(spacers, || { - let resizable_columns = Rc::clone(&resizable_columns_shared); - let initial_sizes = Rc::clone(&initial_sizes_shared); - window.with_id(column_ix, |window| { - let mut resize_divider = div() - .id(column_ix) - .relative() - .top_0() - .w(px(RESIZE_DIVIDER_WIDTH)) - .h_full() - .bg(cx.theme().colors().border.opacity(0.8)); - - let mut resize_handle = div() - .id("column-resize-handle") - .absolute() - .left_neg_0p5() - .w(px(RESIZE_COLUMN_WIDTH)) - .h_full(); - - if resizable_columns_iter - .next() - .is_some_and(TableResizeBehavior::is_resizable) - { - let hovered = window.use_state(cx, |_window, _cx| false); - - resize_divider = resize_divider.when(*hovered.read(cx), |div| { - div.bg(cx.theme().colors().border_focused) - }); - - resize_handle = resize_handle - .on_hover(move |&was_hovered, _, cx| hovered.write(cx, was_hovered)) - .cursor_col_resize() - .when_some(columns.clone(), |this, columns| { - this.on_click(move |event, window, cx| { - if event.click_count() >= 2 { - columns.update(cx, |columns, _| { - columns.on_double_click( - column_ix, - &initial_sizes, - &resizable_columns, - window, - ); - }) - } - - cx.stop_propagation(); - }) - }) - .on_drag(DraggedColumn(column_ix), |_, _offset, _window, cx| { - cx.new(|_cx| gpui::Empty) - }) - } - - column_ix += 1; - 
resize_divider.child(resize_handle).into_any_element() - }) - }); - - h_flex() - .id("resize-handles") - .absolute() - .inset_0() - .w_full() - .children(dividers) - .into_any_element() -} - -#[derive(Debug, Copy, Clone, PartialEq)] -pub enum TableResizeBehavior { - None, - Resizable, - MinSize(f32), -} - -impl TableResizeBehavior { - pub fn is_resizable(&self) -> bool { - *self != TableResizeBehavior::None - } - - pub fn min_size(&self) -> Option { - match self { - TableResizeBehavior::None => None, - TableResizeBehavior::Resizable => Some(0.05), - TableResizeBehavior::MinSize(min_size) => Some(*min_size), - } - } -} - pub enum ColumnWidthConfig { /// Static column widths (no resize handles). Static { @@ -278,6 +154,21 @@ impl ColumnWidthConfig { } } + /// Explicit column widths with no fixed table width. + pub fn explicit>(widths: Vec) -> Self { + let cols = widths.len(); + ColumnWidthConfig::Static { + widths: StaticColumnWidths::Explicit( + widths + .into_iter() + .map(Into::into) + .collect::>() + .into_table_row(cols), + ), + table_width: None, + } + } + /// Column widths for rendering. pub fn widths_to_render(&self, cx: &App) -> Option> { match self { @@ -292,10 +183,7 @@ impl ColumnWidthConfig { ColumnWidthConfig::Redistributable { columns_state: entity, .. - } => { - let state = entity.read(cx); - Some(state.preview_widths.map_cloned(Length::Definite)) - } + } => Some(entity.read(cx).widths_to_render()), } } @@ -316,296 +204,6 @@ impl ColumnWidthConfig { None => ListHorizontalSizingBehavior::FitList, } } - - /// Render resize handles overlay if applicable. - pub fn render_resize_handles(&self, window: &mut Window, cx: &mut App) -> Option { - match self { - ColumnWidthConfig::Redistributable { - columns_state: entity, - .. 
- } => { - let (column_widths, resize_behavior, initial_widths) = { - let state = entity.read(cx); - ( - state.preview_widths.map_cloned(Length::Definite), - state.resize_behavior.clone(), - state.initial_widths.clone(), - ) - }; - Some(render_resize_handles( - &column_widths, - &resize_behavior, - &initial_widths, - Some(entity.clone()), - window, - cx, - )) - } - _ => None, - } - } - - /// Returns info needed for header double-click-to-reset, if applicable. - pub fn header_resize_info(&self, cx: &App) -> Option { - match self { - ColumnWidthConfig::Redistributable { columns_state, .. } => { - let state = columns_state.read(cx); - Some(HeaderResizeInfo { - columns_state: columns_state.downgrade(), - resize_behavior: state.resize_behavior.clone(), - initial_widths: state.initial_widths.clone(), - }) - } - _ => None, - } - } -} - -#[derive(Clone)] -pub struct HeaderResizeInfo { - pub columns_state: WeakEntity, - pub resize_behavior: TableRow, - pub initial_widths: TableRow, -} - -pub struct RedistributableColumnsState { - pub(crate) initial_widths: TableRow, - pub(crate) committed_widths: TableRow, - pub(crate) preview_widths: TableRow, - pub(crate) resize_behavior: TableRow, - pub(crate) cached_table_width: Pixels, -} - -impl RedistributableColumnsState { - pub fn new( - cols: usize, - initial_widths: UncheckedTableRow>, - resize_behavior: UncheckedTableRow, - ) -> Self { - let widths: TableRow = initial_widths - .into_iter() - .map(Into::into) - .collect::>() - .into_table_row(cols); - Self { - initial_widths: widths.clone(), - committed_widths: widths.clone(), - preview_widths: widths, - resize_behavior: resize_behavior.into_table_row(cols), - cached_table_width: Default::default(), - } - } - - pub fn cols(&self) -> usize { - self.committed_widths.cols() - } - - pub fn initial_widths(&self) -> &TableRow { - &self.initial_widths - } - - pub fn resize_behavior(&self) -> &TableRow { - &self.resize_behavior - } - - fn get_fraction(length: &DefiniteLength, 
bounds_width: Pixels, rem_size: Pixels) -> f32 { - match length { - DefiniteLength::Absolute(AbsoluteLength::Pixels(pixels)) => *pixels / bounds_width, - DefiniteLength::Absolute(AbsoluteLength::Rems(rems_width)) => { - rems_width.to_pixels(rem_size) / bounds_width - } - DefiniteLength::Fraction(fraction) => *fraction, - } - } - - pub(crate) fn on_double_click( - &mut self, - double_click_position: usize, - initial_sizes: &TableRow, - resize_behavior: &TableRow, - window: &mut Window, - ) { - let bounds_width = self.cached_table_width; - let rem_size = window.rem_size(); - let initial_sizes = - initial_sizes.map_ref(|length| Self::get_fraction(length, bounds_width, rem_size)); - let widths = self - .committed_widths - .map_ref(|length| Self::get_fraction(length, bounds_width, rem_size)); - - let updated_widths = Self::reset_to_initial_size( - double_click_position, - widths, - initial_sizes, - resize_behavior, - ); - self.committed_widths = updated_widths.map(DefiniteLength::Fraction); - self.preview_widths = self.committed_widths.clone(); - } - - pub(crate) fn reset_to_initial_size( - col_idx: usize, - mut widths: TableRow, - initial_sizes: TableRow, - resize_behavior: &TableRow, - ) -> TableRow { - let diff = initial_sizes[col_idx] - widths[col_idx]; - - let left_diff = - initial_sizes[..col_idx].iter().sum::() - widths[..col_idx].iter().sum::(); - let right_diff = initial_sizes[col_idx + 1..].iter().sum::() - - widths[col_idx + 1..].iter().sum::(); - - let go_left_first = if diff < 0.0 { - left_diff > right_diff - } else { - left_diff < right_diff - }; - - if !go_left_first { - let diff_remaining = - Self::propagate_resize_diff(diff, col_idx, &mut widths, resize_behavior, 1); - - if diff_remaining != 0.0 && col_idx > 0 { - Self::propagate_resize_diff( - diff_remaining, - col_idx, - &mut widths, - resize_behavior, - -1, - ); - } - } else { - let diff_remaining = - Self::propagate_resize_diff(diff, col_idx, &mut widths, resize_behavior, -1); - - if diff_remaining 
!= 0.0 { - Self::propagate_resize_diff( - diff_remaining, - col_idx, - &mut widths, - resize_behavior, - 1, - ); - } - } - - widths - } - - pub(crate) fn on_drag_move( - &mut self, - drag_event: &DragMoveEvent, - window: &mut Window, - cx: &mut Context, - ) { - let drag_position = drag_event.event.position; - let bounds = drag_event.bounds; - - let mut col_position = 0.0; - let rem_size = window.rem_size(); - let bounds_width = bounds.right() - bounds.left(); - let col_idx = drag_event.drag(cx).0; - - let divider_width = Self::get_fraction( - &DefiniteLength::Absolute(AbsoluteLength::Pixels(px(RESIZE_DIVIDER_WIDTH))), - bounds_width, - rem_size, - ); - - let mut widths = self - .committed_widths - .map_ref(|length| Self::get_fraction(length, bounds_width, rem_size)); - - for length in widths[0..=col_idx].iter() { - col_position += length + divider_width; - } - - let mut total_length_ratio = col_position; - for length in widths[col_idx + 1..].iter() { - total_length_ratio += length; - } - let cols = self.resize_behavior.cols(); - total_length_ratio += (cols - 1 - col_idx) as f32 * divider_width; - - let drag_fraction = (drag_position.x - bounds.left()) / bounds_width; - let drag_fraction = drag_fraction * total_length_ratio; - let diff = drag_fraction - col_position - divider_width / 2.0; - - Self::drag_column_handle(diff, col_idx, &mut widths, &self.resize_behavior); - - self.preview_widths = widths.map(DefiniteLength::Fraction); - } - - pub(crate) fn drag_column_handle( - diff: f32, - col_idx: usize, - widths: &mut TableRow, - resize_behavior: &TableRow, - ) { - if diff > 0.0 { - Self::propagate_resize_diff(diff, col_idx, widths, resize_behavior, 1); - } else { - Self::propagate_resize_diff(-diff, col_idx + 1, widths, resize_behavior, -1); - } - } - - pub(crate) fn propagate_resize_diff( - diff: f32, - col_idx: usize, - widths: &mut TableRow, - resize_behavior: &TableRow, - direction: i8, - ) -> f32 { - let mut diff_remaining = diff; - if 
resize_behavior[col_idx].min_size().is_none() { - return diff; - } - - let step_right; - let step_left; - if direction < 0 { - step_right = 0; - step_left = 1; - } else { - step_right = 1; - step_left = 0; - } - if col_idx == 0 && direction < 0 { - return diff; - } - let mut curr_column = col_idx + step_right - step_left; - - while diff_remaining != 0.0 && curr_column < widths.cols() { - let Some(min_size) = resize_behavior[curr_column].min_size() else { - if curr_column == 0 { - break; - } - curr_column -= step_left; - curr_column += step_right; - continue; - }; - - let curr_width = widths[curr_column] - diff_remaining; - widths[curr_column] = curr_width; - - if min_size > curr_width { - diff_remaining = min_size - curr_width; - widths[curr_column] = min_size; - } else { - diff_remaining = 0.0; - break; - } - if curr_column == 0 { - break; - } - curr_column -= step_left; - curr_column += step_right; - } - widths[col_idx] = widths[col_idx] + (diff - diff_remaining); - - diff_remaining - } } /// A table component @@ -919,11 +517,8 @@ pub fn render_table_header( if event.click_count() > 1 { info.columns_state .update(cx, |column, _| { - column.on_double_click( - header_idx, - &info.initial_widths, - &info.resize_behavior, - window, + column.reset_column_to_initial_width( + header_idx, window, ); }) .ok(); @@ -962,6 +557,19 @@ impl TableRenderContext { disable_base_cell_style: table.disable_base_cell_style, } } + + pub fn for_column_widths(column_widths: Option>, use_ui_font: bool) -> Self { + Self { + striped: false, + show_row_borders: true, + show_row_hover: true, + total_row_count: 0, + column_widths, + map_row: None, + use_ui_font, + disable_base_cell_style: false, + } + } } impl RenderOnce for Table { @@ -969,9 +577,15 @@ impl RenderOnce for Table { let table_context = TableRenderContext::new(&self, cx); let interaction_state = self.interaction_state.and_then(|state| state.upgrade()); - let header_resize_info = interaction_state - .as_ref() - .and_then(|_| 
self.column_width_config.header_resize_info(cx)); + let header_resize_info = + interaction_state + .as_ref() + .and_then(|_| match &self.column_width_config { + ColumnWidthConfig::Redistributable { columns_state, .. } => { + Some(HeaderResizeInfo::from_state(columns_state, cx)) + } + _ => None, + }); let table_width = self.column_width_config.table_width(); let horizontal_sizing = self.column_width_config.list_horizontal_sizing(); @@ -985,13 +599,19 @@ impl RenderOnce for Table { ColumnWidthConfig::Redistributable { columns_state: entity, .. - } => Some(entity.downgrade()), + } => Some(entity.clone()), _ => None, }); - let resize_handles = interaction_state - .as_ref() - .and_then(|_| self.column_width_config.render_resize_handles(window, cx)); + let resize_handles = + interaction_state + .as_ref() + .and_then(|_| match &self.column_width_config { + ColumnWidthConfig::Redistributable { columns_state, .. } => Some( + render_redistributable_columns_resize_handles(columns_state, window, cx), + ), + _ => None, + }); let table = div() .when_some(table_width, |this, width| this.w(width)) @@ -1006,38 +626,8 @@ impl RenderOnce for Table { cx, )) }) - .when_some(redistributable_entity, { - |this, widths| { - this.on_drag_move::({ - let widths = widths.clone(); - move |e, window, cx| { - widths - .update(cx, |widths, cx| { - widths.on_drag_move(e, window, cx); - }) - .ok(); - } - }) - .on_children_prepainted({ - let widths = widths.clone(); - move |bounds, _, cx| { - widths - .update(cx, |widths, _| { - // This works because all children x axis bounds are the same - widths.cached_table_width = - bounds[0].right() - bounds[0].left(); - }) - .ok(); - } - }) - .on_drop::(move |_, _, cx| { - widths - .update(cx, |widths, _| { - widths.committed_widths = widths.preview_widths.clone(); - }) - .ok(); - }) - } + .when_some(redistributable_entity, |this, widths| { + bind_redistributable_columns(this, widths) }) .child({ let content = div() diff --git 
a/crates/ui/src/components/data_table/tests.rs b/crates/ui/src/components/data_table/tests.rs index 0936cd3088cc50bc08bf0a0a09d9a6fa7a2cdaf0..604e8b7cd1aabee85b406ec99d458c949eda599b 100644 --- a/crates/ui/src/components/data_table/tests.rs +++ b/crates/ui/src/components/data_table/tests.rs @@ -1,4 +1,5 @@ -use super::*; +use super::table_row::TableRow; +use crate::{RedistributableColumnsState, TableResizeBehavior}; fn is_almost_eq(a: &[f32], b: &[f32]) -> bool { a.len() == b.len() && a.iter().zip(b).all(|(x, y)| (x - y).abs() < 1e-6) diff --git a/crates/ui/src/components/redistributable_columns.rs b/crates/ui/src/components/redistributable_columns.rs new file mode 100644 index 0000000000000000000000000000000000000000..cd22c31e19736e72e5d88676178053b49a3e65fd --- /dev/null +++ b/crates/ui/src/components/redistributable_columns.rs @@ -0,0 +1,485 @@ +use std::rc::Rc; + +use gpui::{ + AbsoluteLength, AppContext as _, Bounds, DefiniteLength, DragMoveEvent, Empty, Entity, Length, + WeakEntity, +}; +use itertools::intersperse_with; + +use super::data_table::table_row::{IntoTableRow as _, TableRow}; +use crate::{ + ActiveTheme as _, AnyElement, App, Context, Div, FluentBuilder as _, InteractiveElement, + IntoElement, ParentElement, Pixels, StatefulInteractiveElement, Styled, Window, div, h_flex, + px, +}; + +const RESIZE_COLUMN_WIDTH: f32 = 8.0; +const RESIZE_DIVIDER_WIDTH: f32 = 1.0; + +#[derive(Debug)] +struct DraggedColumn(usize); + +#[derive(Debug, Copy, Clone, PartialEq)] +pub enum TableResizeBehavior { + None, + Resizable, + MinSize(f32), +} + +impl TableResizeBehavior { + pub fn is_resizable(&self) -> bool { + *self != TableResizeBehavior::None + } + + pub fn min_size(&self) -> Option { + match self { + TableResizeBehavior::None => None, + TableResizeBehavior::Resizable => Some(0.05), + TableResizeBehavior::MinSize(min_size) => Some(*min_size), + } + } +} + +#[derive(Clone)] +pub struct HeaderResizeInfo { + pub columns_state: WeakEntity, + pub resize_behavior: 
TableRow, +} + +impl HeaderResizeInfo { + pub fn from_state(columns_state: &Entity, cx: &App) -> Self { + let resize_behavior = columns_state.read(cx).resize_behavior().clone(); + Self { + columns_state: columns_state.downgrade(), + resize_behavior, + } + } +} + +pub struct RedistributableColumnsState { + pub(crate) initial_widths: TableRow, + pub(crate) committed_widths: TableRow, + pub(crate) preview_widths: TableRow, + pub(crate) resize_behavior: TableRow, + pub(crate) cached_container_width: Pixels, +} + +impl RedistributableColumnsState { + pub fn new( + cols: usize, + initial_widths: Vec>, + resize_behavior: Vec, + ) -> Self { + let widths: TableRow = initial_widths + .into_iter() + .map(Into::into) + .collect::>() + .into_table_row(cols); + Self { + initial_widths: widths.clone(), + committed_widths: widths.clone(), + preview_widths: widths, + resize_behavior: resize_behavior.into_table_row(cols), + cached_container_width: Default::default(), + } + } + + pub fn cols(&self) -> usize { + self.committed_widths.cols() + } + + pub fn initial_widths(&self) -> &TableRow { + &self.initial_widths + } + + pub fn preview_widths(&self) -> &TableRow { + &self.preview_widths + } + + pub fn resize_behavior(&self) -> &TableRow { + &self.resize_behavior + } + + pub fn widths_to_render(&self) -> TableRow { + self.preview_widths.map_cloned(Length::Definite) + } + + pub fn preview_fractions(&self, rem_size: Pixels) -> TableRow { + if self.cached_container_width > px(0.) 
{ + self.preview_widths + .map_ref(|length| Self::get_fraction(length, self.cached_container_width, rem_size)) + } else { + self.preview_widths.map_ref(|length| match length { + DefiniteLength::Fraction(fraction) => *fraction, + DefiniteLength::Absolute(_) => 0.0, + }) + } + } + + pub fn preview_column_width(&self, column_index: usize, window: &Window) -> Option { + let width = self.preview_widths().as_slice().get(column_index)?; + match width { + DefiniteLength::Fraction(fraction) if self.cached_container_width > px(0.) => { + Some(self.cached_container_width * *fraction) + } + DefiniteLength::Fraction(_) => None, + DefiniteLength::Absolute(AbsoluteLength::Pixels(pixels)) => Some(*pixels), + DefiniteLength::Absolute(AbsoluteLength::Rems(rems_width)) => { + Some(rems_width.to_pixels(window.rem_size())) + } + } + } + + pub fn cached_container_width(&self) -> Pixels { + self.cached_container_width + } + + pub fn set_cached_container_width(&mut self, width: Pixels) { + self.cached_container_width = width; + } + + pub fn commit_preview(&mut self) { + self.committed_widths = self.preview_widths.clone(); + } + + pub fn reset_column_to_initial_width(&mut self, column_index: usize, window: &Window) { + let bounds_width = self.cached_container_width; + if bounds_width <= px(0.) 
{ + return; + } + + let rem_size = window.rem_size(); + let initial_sizes = self + .initial_widths + .map_ref(|length| Self::get_fraction(length, bounds_width, rem_size)); + let widths = self + .committed_widths + .map_ref(|length| Self::get_fraction(length, bounds_width, rem_size)); + + let updated_widths = + Self::reset_to_initial_size(column_index, widths, initial_sizes, &self.resize_behavior); + self.committed_widths = updated_widths.map(DefiniteLength::Fraction); + self.preview_widths = self.committed_widths.clone(); + } + + fn get_fraction(length: &DefiniteLength, bounds_width: Pixels, rem_size: Pixels) -> f32 { + match length { + DefiniteLength::Absolute(AbsoluteLength::Pixels(pixels)) => *pixels / bounds_width, + DefiniteLength::Absolute(AbsoluteLength::Rems(rems_width)) => { + rems_width.to_pixels(rem_size) / bounds_width + } + DefiniteLength::Fraction(fraction) => *fraction, + } + } + + pub(crate) fn reset_to_initial_size( + col_idx: usize, + mut widths: TableRow, + initial_sizes: TableRow, + resize_behavior: &TableRow, + ) -> TableRow { + let diff = initial_sizes[col_idx] - widths[col_idx]; + + let left_diff = + initial_sizes[..col_idx].iter().sum::() - widths[..col_idx].iter().sum::(); + let right_diff = initial_sizes[col_idx + 1..].iter().sum::() + - widths[col_idx + 1..].iter().sum::(); + + let go_left_first = if diff < 0.0 { + left_diff > right_diff + } else { + left_diff < right_diff + }; + + if !go_left_first { + let diff_remaining = + Self::propagate_resize_diff(diff, col_idx, &mut widths, resize_behavior, 1); + + if diff_remaining != 0.0 && col_idx > 0 { + Self::propagate_resize_diff( + diff_remaining, + col_idx, + &mut widths, + resize_behavior, + -1, + ); + } + } else { + let diff_remaining = + Self::propagate_resize_diff(diff, col_idx, &mut widths, resize_behavior, -1); + + if diff_remaining != 0.0 { + Self::propagate_resize_diff( + diff_remaining, + col_idx, + &mut widths, + resize_behavior, + 1, + ); + } + } + + widths + } + + fn 
on_drag_move( + &mut self, + drag_event: &DragMoveEvent, + window: &mut Window, + cx: &mut Context, + ) { + let drag_position = drag_event.event.position; + let bounds = drag_event.bounds; + let bounds_width = bounds.right() - bounds.left(); + if bounds_width <= px(0.) { + return; + } + + let mut col_position = 0.0; + let rem_size = window.rem_size(); + let col_idx = drag_event.drag(cx).0; + + let divider_width = Self::get_fraction( + &DefiniteLength::Absolute(AbsoluteLength::Pixels(px(RESIZE_DIVIDER_WIDTH))), + bounds_width, + rem_size, + ); + + let mut widths = self + .committed_widths + .map_ref(|length| Self::get_fraction(length, bounds_width, rem_size)); + + for length in widths[0..=col_idx].iter() { + col_position += length + divider_width; + } + + let mut total_length_ratio = col_position; + for length in widths[col_idx + 1..].iter() { + total_length_ratio += length; + } + let cols = self.resize_behavior.cols(); + total_length_ratio += (cols - 1 - col_idx) as f32 * divider_width; + + let drag_fraction = (drag_position.x - bounds.left()) / bounds_width; + let drag_fraction = drag_fraction * total_length_ratio; + let diff = drag_fraction - col_position - divider_width / 2.0; + + Self::drag_column_handle(diff, col_idx, &mut widths, &self.resize_behavior); + + self.preview_widths = widths.map(DefiniteLength::Fraction); + } + + pub(crate) fn drag_column_handle( + diff: f32, + col_idx: usize, + widths: &mut TableRow, + resize_behavior: &TableRow, + ) { + if diff > 0.0 { + Self::propagate_resize_diff(diff, col_idx, widths, resize_behavior, 1); + } else { + Self::propagate_resize_diff(-diff, col_idx + 1, widths, resize_behavior, -1); + } + } + + pub(crate) fn propagate_resize_diff( + diff: f32, + col_idx: usize, + widths: &mut TableRow, + resize_behavior: &TableRow, + direction: i8, + ) -> f32 { + let mut diff_remaining = diff; + if resize_behavior[col_idx].min_size().is_none() { + return diff; + } + + let step_right; + let step_left; + if direction < 0 { + 
step_right = 0; + step_left = 1; + } else { + step_right = 1; + step_left = 0; + } + if col_idx == 0 && direction < 0 { + return diff; + } + let mut curr_column = col_idx + step_right - step_left; + + while diff_remaining != 0.0 && curr_column < widths.cols() { + let Some(min_size) = resize_behavior[curr_column].min_size() else { + if curr_column == 0 { + break; + } + curr_column -= step_left; + curr_column += step_right; + continue; + }; + + let curr_width = widths[curr_column] - diff_remaining; + widths[curr_column] = curr_width; + + if min_size > curr_width { + diff_remaining = min_size - curr_width; + widths[curr_column] = min_size; + } else { + diff_remaining = 0.0; + break; + } + if curr_column == 0 { + break; + } + curr_column -= step_left; + curr_column += step_right; + } + widths[col_idx] = widths[col_idx] + (diff - diff_remaining); + + diff_remaining + } +} + +pub fn bind_redistributable_columns( + container: Div, + columns_state: Entity, +) -> Div { + container + .on_drag_move::({ + let columns_state = columns_state.clone(); + move |event, window, cx| { + columns_state.update(cx, |columns, cx| { + columns.on_drag_move(event, window, cx); + }); + } + }) + .on_children_prepainted({ + let columns_state = columns_state.clone(); + move |bounds, _, cx| { + if let Some(width) = child_bounds_width(&bounds) { + columns_state.update(cx, |columns, _| { + columns.set_cached_container_width(width); + }); + } + } + }) + .on_drop::(move |_, _, cx| { + columns_state.update(cx, |columns, _| { + columns.commit_preview(); + }); + }) +} + +pub fn render_redistributable_columns_resize_handles( + columns_state: &Entity, + window: &mut Window, + cx: &mut App, +) -> AnyElement { + let (column_widths, resize_behavior) = { + let state = columns_state.read(cx); + (state.widths_to_render(), state.resize_behavior().clone()) + }; + + let mut column_ix = 0; + let resize_behavior = Rc::new(resize_behavior); + let dividers = intersperse_with( + column_widths + .as_slice() + .iter() + 
.copied() + .map(|width| resize_spacer(width).into_any_element()), + || { + let current_column_ix = column_ix; + let resize_behavior = Rc::clone(&resize_behavior); + let columns_state = columns_state.clone(); + column_ix += 1; + + window.with_id(current_column_ix, |window| { + let mut resize_divider = div() + .id(current_column_ix) + .relative() + .top_0() + .w(px(RESIZE_DIVIDER_WIDTH)) + .h_full() + .bg(cx.theme().colors().border.opacity(0.8)); + + let mut resize_handle = div() + .id("column-resize-handle") + .absolute() + .left_neg_0p5() + .w(px(RESIZE_COLUMN_WIDTH)) + .h_full(); + + if resize_behavior[current_column_ix].is_resizable() { + let is_highlighted = window.use_state(cx, |_window, _cx| false); + + resize_divider = resize_divider.when(*is_highlighted.read(cx), |div| { + div.bg(cx.theme().colors().border_focused) + }); + + resize_handle = resize_handle + .on_hover({ + let is_highlighted = is_highlighted.clone(); + move |&was_hovered, _, cx| is_highlighted.write(cx, was_hovered) + }) + .cursor_col_resize() + .on_click({ + let columns_state = columns_state.clone(); + move |event, window, cx| { + if event.click_count() >= 2 { + columns_state.update(cx, |columns, _| { + columns.reset_column_to_initial_width( + current_column_ix, + window, + ); + }); + } + + cx.stop_propagation(); + } + }) + .on_drag(DraggedColumn(current_column_ix), { + let is_highlighted = is_highlighted.clone(); + move |_, _offset, _window, cx| { + is_highlighted.write(cx, true); + cx.new(|_cx| Empty) + } + }) + .on_drop::(move |_, _, cx| { + is_highlighted.write(cx, false); + columns_state.update(cx, |state, _| { + state.commit_preview(); + }); + }); + } + + resize_divider.child(resize_handle).into_any_element() + }) + }, + ); + + h_flex() + .id("resize-handles") + .absolute() + .inset_0() + .w_full() + .children(dividers) + .into_any_element() +} + +fn resize_spacer(width: Length) -> Div { + div().w(width).h_full() +} + +fn child_bounds_width(bounds: &[Bounds]) -> Option { + let 
first_bounds = bounds.first()?; + let mut left = first_bounds.left(); + let mut right = first_bounds.right(); + + for bound in bounds.iter().skip(1) { + left = left.min(bound.left()); + right = right.max(bound.right()); + } + + Some(right - left) +} diff --git a/crates/util/src/path_list.rs b/crates/util/src/path_list.rs index 0ea8bce6face2c248239c92e43a14ed010fb0c6e..af99f4c6570b35b004179afb87b737d3a4356489 100644 --- a/crates/util/src/path_list.rs +++ b/crates/util/src/path_list.rs @@ -38,7 +38,7 @@ impl Hash for PathList { } } -#[derive(Debug, Serialize, Deserialize)] +#[derive(Debug, Clone, Serialize, Deserialize)] pub struct SerializedPathList { pub paths: String, pub order: String, @@ -65,6 +65,16 @@ impl PathList { self.paths.is_empty() } + /// Returns a new `PathList` with the given path removed. + pub fn without_path(&self, path_to_remove: &Path) -> PathList { + let paths: Vec = self + .ordered_paths() + .filter(|p| p.as_path() != path_to_remove) + .cloned() + .collect(); + PathList::new(&paths) + } + /// Get the paths in lexicographic order. 
pub fn paths(&self) -> &[PathBuf] { self.paths.as_ref() diff --git a/crates/util/src/shell_env.rs b/crates/util/src/shell_env.rs index e298530ac3cca3dd67f20609b1c3b7cc95fe4838..72c563abe52336c2b5ccc511746834a9a0384aeb 100644 --- a/crates/util/src/shell_env.rs +++ b/crates/util/src/shell_env.rs @@ -2,9 +2,21 @@ use std::path::Path; use anyhow::{Context as _, Result}; use collections::HashMap; +use serde::Deserialize; use crate::shell::ShellKind; +fn parse_env_map_from_noisy_output(output: &str) -> Result> { + for (position, _) in output.match_indices('{') { + let candidate = &output[position..]; + let mut deserializer = serde_json::Deserializer::from_str(candidate); + if let Ok(env_map) = HashMap::::deserialize(&mut deserializer) { + return Ok(env_map); + } + } + anyhow::bail!("Failed to find JSON in shell output: {output}") +} + pub fn print_env() { let env_vars: HashMap = std::env::vars().collect(); let json = serde_json::to_string_pretty(&env_vars).unwrap_or_else(|err| { @@ -109,10 +121,9 @@ async fn capture_unix( ); // Parse the JSON output from zed --printenv - let env_map: collections::HashMap = serde_json::from_str(&env_output) - .with_context(|| { - format!("Failed to deserialize environment variables from json: {env_output}") - })?; + let env_map = parse_env_map_from_noisy_output(&env_output).with_context(|| { + format!("Failed to deserialize environment variables from json: {env_output}") + })?; Ok(env_map) } @@ -213,14 +224,10 @@ async fn capture_windows( &format!("cd {}; {} --printenv", quoted_directory, zed_command), ]) } - ShellKind::Cmd => cmd.args([ - "/c", - "cd", - &directory_string, - "&&", - &zed_path_string, - "--printenv", - ]), + ShellKind::Cmd => { + let dir = directory_string.trim_end_matches('\\'); + cmd.args(["/d", "/c", "cd", dir, "&&", &zed_path_string, "--printenv"]) + } } .stdin(Stdio::null()) .stdout(Stdio::piped()) @@ -238,8 +245,7 @@ async fn capture_windows( ); let env_output = String::from_utf8_lossy(&output.stdout); - // Parse 
the JSON output from zed --printenv - serde_json::from_str(&env_output).with_context(|| { + parse_env_map_from_noisy_output(&env_output).with_context(|| { format!("Failed to deserialize environment variables from json: {env_output}") }) } diff --git a/crates/vim/src/command.rs b/crates/vim/src/command.rs index 362fed2df3543c5571f83db2c964a8c17fcebcb3..fd19a5dc400a24b9f27617c44bd71fe38073c757 100644 --- a/crates/vim/src/command.rs +++ b/crates/vim/src/command.rs @@ -1348,7 +1348,7 @@ impl Position { let snapshot = editor.snapshot(window, cx); let target = match self { Position::Line { row, offset } => { - if let Some(anchor) = editor.active_excerpt(cx).and_then(|(_, buffer, _)| { + if let Some(anchor) = editor.active_buffer(cx).and_then(|buffer| { editor.buffer().read(cx).buffer_point_to_anchor( &buffer, Point::new(row.saturating_sub(1), 0), @@ -2336,7 +2336,7 @@ impl Vim { match c { '%' => { self.update_editor(cx, |_, editor, cx| { - if let Some((_, buffer, _)) = editor.active_excerpt(cx) + if let Some(buffer) = editor.active_buffer(cx) && let Some(file) = buffer.read(cx).file() && let Some(local) = file.as_local() { diff --git a/crates/vim/src/helix.rs b/crates/vim/src/helix.rs index d2c8f4b78dcde8c4f2135b63ee3d07f04e01ebd5..923bd8c6a057819129b29b86e559c79a30f011f9 100644 --- a/crates/vim/src/helix.rs +++ b/crates/vim/src/helix.rs @@ -12,7 +12,6 @@ use editor::{ }; use gpui::actions; use gpui::{Context, Window}; -use itertools::Itertools as _; use language::{CharClassifier, CharKind, Point}; use search::{BufferSearchBar, SearchOptions}; use settings::Settings; @@ -941,19 +940,15 @@ impl Vim { editor.change_selections(SelectionEffects::default(), window, cx, |s| { let buffer = snapshot.buffer_snapshot(); - s.select_anchor_ranges( + s.select_ranges( prior_selections .iter() .cloned() .chain(s.all_anchors(&snapshot).iter().map(|s| s.range())) - .sorted_by(|a, b| { - a.start - .cmp(&b.start, buffer) - .then_with(|| a.end.cmp(&b.end, buffer)) - }) - .dedup_by(|a, b| { 
- a.start.cmp(&b.start, buffer).is_eq() - && a.end.cmp(&b.end, buffer).is_eq() + .map(|range| { + let start = range.start.to_offset(buffer); + let end = range.end.to_offset(buffer); + start..end }), ); }) @@ -2152,6 +2147,93 @@ mod test { cx.assert_state("hello two «oneˇ» two «oneˇ» two «oneˇ»", Mode::HelixSelect); } + #[gpui::test] + async fn test_helix_select_next_match_wrapping_from_normal(cx: &mut gpui::TestAppContext) { + let mut cx = VimTestContext::new(cx, true).await; + cx.enable_helix(); + + // Exact repro for #51573: start in HelixNormal, search, then `v` to + // enter HelixSelect, then `n` past last match. + // + // In HelixNormal, search collapses the cursor to the match start. + // Pressing `v` expands by only one character, creating a partial + // selection that overlaps the full match range when the search wraps. + // The overlapping ranges must be merged (not just deduped) to avoid + // a backward-seeking rope cursor panic. + cx.set_state( + indoc! {" + searˇch term + stuff + search term + other stuff + "}, + Mode::HelixNormal, + ); + cx.simulate_keystrokes("/ t e r m"); + cx.simulate_keystrokes("enter"); + cx.simulate_keystrokes("v"); + cx.simulate_keystrokes("n"); + cx.simulate_keystrokes("n"); + // Should not panic when wrapping past last match. + cx.assert_state( + indoc! {" + search «termˇ» + stuff + search «termˇ» + other stuff + "}, + Mode::HelixSelect, + ); + } + + #[gpui::test] + async fn test_helix_select_star_then_match(cx: &mut gpui::TestAppContext) { + let mut cx = VimTestContext::new(cx, true).await; + cx.enable_helix(); + + // Repro attempts for #52852: `*` searches for word under cursor, + // `v` enters select, `n` accumulates matches, `m` triggers match mode. + // Try multiple cursor positions and match counts. + + // Cursor on first occurrence, 3 more occurrences to select through + cx.set_state( + indoc! 
{" + ˇone two one three one four one + "}, + Mode::HelixNormal, + ); + cx.simulate_keystrokes("*"); + cx.simulate_keystrokes("v"); + cx.simulate_keystrokes("n n n"); + // Should not panic on wrapping `n`. + + // Cursor in the middle of text before matches + cx.set_state( + indoc! {" + heˇllo one two one three one + "}, + Mode::HelixNormal, + ); + cx.simulate_keystrokes("*"); + cx.simulate_keystrokes("v"); + cx.simulate_keystrokes("n"); + // Should not panic. + + // The original #52852 sequence: * v n n n then m m + cx.set_state( + indoc! {" + fn ˇfoo() { bar(foo()) } + fn baz() { foo() } + "}, + Mode::HelixNormal, + ); + cx.simulate_keystrokes("*"); + cx.simulate_keystrokes("v"); + cx.simulate_keystrokes("n n n"); + cx.simulate_keystrokes("m m"); + // Should not panic. + } + #[gpui::test] async fn test_helix_substitute(cx: &mut gpui::TestAppContext) { let mut cx = VimTestContext::new(cx, true).await; diff --git a/crates/vim/src/motion.rs b/crates/vim/src/motion.rs index 579af3d314ef114381de892b147d8d0a540656fb..6e992704f54bf7aba3cc775d906a90281234dbd0 100644 --- a/crates/vim/src/motion.rs +++ b/crates/vim/src/motion.rs @@ -1,16 +1,18 @@ use editor::{ Anchor, Bias, BufferOffset, DisplayPoint, Editor, MultiBufferOffset, RowExt, ToOffset, + ToPoint as _, display_map::{DisplayRow, DisplaySnapshot, FoldPoint, ToDisplayPoint}, movement::{ self, FindRange, TextLayoutDetails, find_boundary, find_preceding_boundary_display_point, }, }; use gpui::{Action, Context, Window, actions, px}; -use language::{CharKind, Point, Selection, SelectionGoal}; +use language::{CharKind, Point, Selection, SelectionGoal, TextObject, TreeSitterOptions}; use multi_buffer::MultiBufferRow; use schemars::JsonSchema; use serde::Deserialize; use std::{f64, ops::Range}; + use workspace::searchable::Direction; use crate::{ @@ -2340,39 +2342,19 @@ fn start_of_next_sentence( fn go_to_line(map: &DisplaySnapshot, display_point: DisplayPoint, line: usize) -> DisplayPoint { let point = 
map.display_point_to_point(display_point, Bias::Left); - let Some(mut excerpt) = map.buffer_snapshot().excerpt_containing(point..point) else { + let snapshot = map.buffer_snapshot(); + let Some((buffer_snapshot, _)) = snapshot.point_to_buffer_point(point) else { + return display_point; + }; + + let Some(anchor) = snapshot.anchor_in_excerpt(buffer_snapshot.anchor_after( + buffer_snapshot.clip_point(Point::new((line - 1) as u32, point.column), Bias::Left), + )) else { return display_point; }; - let offset = excerpt.buffer().point_to_offset( - excerpt - .buffer() - .clip_point(Point::new((line - 1) as u32, point.column), Bias::Left), - ); - let buffer_range = excerpt.buffer_range(); - if offset >= buffer_range.start.0 && offset <= buffer_range.end.0 { - let point = map - .buffer_snapshot() - .offset_to_point(excerpt.map_offset_from_buffer(BufferOffset(offset))); - return map.clip_point(map.point_to_display_point(point, Bias::Left), Bias::Left); - } - for (excerpt, buffer, range) in map.buffer_snapshot().excerpts() { - let excerpt_range = language::ToOffset::to_offset(&range.context.start, buffer) - ..language::ToOffset::to_offset(&range.context.end, buffer); - if offset >= excerpt_range.start && offset <= excerpt_range.end { - let text_anchor = buffer.anchor_after(offset); - let anchor = Anchor::in_buffer(excerpt, text_anchor); - return anchor.to_display_point(map); - } else if offset <= excerpt_range.start { - let anchor = Anchor::in_buffer(excerpt, range.context.start); - return anchor.to_display_point(map); - } - } map.clip_point( - map.point_to_display_point( - map.buffer_snapshot().clip_point(point, Bias::Left), - Bias::Left, - ), + map.point_to_display_point(anchor.to_point(snapshot), Bias::Left), Bias::Left, ) } @@ -2469,6 +2451,10 @@ fn find_matching_bracket_text_based( .take_while(|(_, char_offset)| *char_offset < line_range.end) .find_map(|(ch, char_offset)| get_bracket_pair(ch).map(|info| (info, char_offset))); + if bracket_info.is_none() { + return 
find_matching_c_preprocessor_directive(map, line_range); + } + let (open, close, is_opening) = bracket_info?.0; let bracket_offset = bracket_info?.1; @@ -2500,6 +2486,122 @@ fn find_matching_bracket_text_based( None } +fn find_matching_c_preprocessor_directive( + map: &DisplaySnapshot, + line_range: Range, +) -> Option { + let line_start = map + .buffer_chars_at(line_range.start) + .skip_while(|(c, _)| *c == ' ' || *c == '\t') + .map(|(c, _)| c) + .take(6) + .collect::(); + + if line_start.starts_with("#if") + || line_start.starts_with("#else") + || line_start.starts_with("#elif") + { + let mut depth = 0i32; + for (ch, char_offset) in map.buffer_chars_at(line_range.end) { + if ch != '\n' { + continue; + } + let mut line_offset = char_offset + '\n'.len_utf8(); + + // Skip leading whitespace + map.buffer_chars_at(line_offset) + .take_while(|(c, _)| *c == ' ' || *c == '\t') + .for_each(|(_, _)| line_offset += 1); + + // Check what directive starts the next line + let next_line_start = map + .buffer_chars_at(line_offset) + .map(|(c, _)| c) + .take(6) + .collect::(); + + if next_line_start.starts_with("#if") { + depth += 1; + } else if next_line_start.starts_with("#endif") { + if depth > 0 { + depth -= 1; + } else { + return Some(line_offset); + } + } else if next_line_start.starts_with("#else") || next_line_start.starts_with("#elif") { + if depth == 0 { + return Some(line_offset); + } + } + } + } else if line_start.starts_with("#endif") { + let mut depth = 0i32; + for (ch, char_offset) in + map.reverse_buffer_chars_at(line_range.start.saturating_sub_usize(1)) + { + let mut line_offset = if char_offset == MultiBufferOffset(0) { + MultiBufferOffset(0) + } else if ch != '\n' { + continue; + } else { + char_offset + '\n'.len_utf8() + }; + + // Skip leading whitespace + map.buffer_chars_at(line_offset) + .take_while(|(c, _)| *c == ' ' || *c == '\t') + .for_each(|(_, _)| line_offset += 1); + + // Check what directive starts this line + let line_start = map + 
.buffer_chars_at(line_offset) + .skip_while(|(c, _)| *c == ' ' || *c == '\t') + .map(|(c, _)| c) + .take(6) + .collect::(); + + if line_start.starts_with("\n\n") { + // empty line + continue; + } else if line_start.starts_with("#endif") { + depth += 1; + } else if line_start.starts_with("#if") { + if depth > 0 { + depth -= 1; + } else { + return Some(line_offset); + } + } + } + } + None +} + +fn comment_delimiter_pair( + map: &DisplaySnapshot, + offset: MultiBufferOffset, +) -> Option<(Range, Range)> { + let snapshot = map.buffer_snapshot(); + snapshot + .text_object_ranges(offset..offset, TreeSitterOptions::default()) + .find_map(|(range, obj)| { + if !matches!(obj, TextObject::InsideComment | TextObject::AroundComment) + || !range.contains(&offset) + { + return None; + } + + let mut chars = snapshot.chars_at(range.start); + if (Some('/'), Some('*')) != (chars.next(), chars.next()) { + return None; + } + + let open_range = range.start..range.start + 2usize; + let close_range = range.end - 2..range.end; + Some((open_range, close_range)) + }) +} + fn matching( map: &DisplaySnapshot, display_point: DisplayPoint, @@ -2627,6 +2729,32 @@ fn matching( continue; } + if let Some((open_range, close_range)) = comment_delimiter_pair(map, offset) { + if open_range.contains(&offset) { + return close_range.start.to_display_point(map); + } + + if close_range.contains(&offset) { + return open_range.start.to_display_point(map); + } + + let open_candidate = (open_range.start >= offset + && line_range.contains(&open_range.start)) + .then_some((open_range.start.saturating_sub(offset), close_range.start)); + + let close_candidate = (close_range.start >= offset + && line_range.contains(&close_range.start)) + .then_some((close_range.start.saturating_sub(offset), open_range.start)); + + if let Some((_, destination)) = [open_candidate, close_candidate] + .into_iter() + .flatten() + .min_by_key(|(distance, _)| *distance) + { + return destination.to_display_point(map); + } + } + 
closest_pair_destination .map(|destination| destination.to_display_point(map)) .unwrap_or_else(|| { @@ -3515,6 +3643,119 @@ mod test { ); } + #[gpui::test] + async fn test_matching_comments(cx: &mut gpui::TestAppContext) { + let mut cx = NeovimBackedTestContext::new(cx).await; + + cx.set_shared_state(indoc! {r"ˇ/* + this is a comment + */"}) + .await; + cx.simulate_shared_keystrokes("%").await; + cx.shared_state().await.assert_eq(indoc! {r"/* + this is a comment + ˇ*/"}); + cx.simulate_shared_keystrokes("%").await; + cx.shared_state().await.assert_eq(indoc! {r"ˇ/* + this is a comment + */"}); + cx.simulate_shared_keystrokes("%").await; + cx.shared_state().await.assert_eq(indoc! {r"/* + this is a comment + ˇ*/"}); + + cx.set_shared_state("ˇ// comment").await; + cx.simulate_shared_keystrokes("%").await; + cx.shared_state().await.assert_eq("ˇ// comment"); + } + + #[gpui::test] + async fn test_matching_preprocessor_directives(cx: &mut gpui::TestAppContext) { + let mut cx = NeovimBackedTestContext::new(cx).await; + + cx.set_shared_state(indoc! {r"#ˇif + + #else + + #endif + "}) + .await; + cx.simulate_shared_keystrokes("%").await; + cx.shared_state().await.assert_eq(indoc! {r"#if + + ˇ#else + + #endif + "}); + + cx.simulate_shared_keystrokes("%").await; + cx.shared_state().await.assert_eq(indoc! {r"#if + + #else + + ˇ#endif + "}); + + cx.simulate_shared_keystrokes("%").await; + cx.shared_state().await.assert_eq(indoc! {r"ˇ#if + + #else + + #endif + "}); + + cx.set_shared_state(indoc! {r" + #ˇif + #if + + #else + + #endif + + #else + #endif + "}) + .await; + + cx.simulate_shared_keystrokes("%").await; + cx.shared_state().await.assert_eq(indoc! {r" + #if + #if + + #else + + #endif + + ˇ#else + #endif + "}); + + cx.simulate_shared_keystrokes("% %").await; + cx.shared_state().await.assert_eq(indoc! {r" + ˇ#if + #if + + #else + + #endif + + #else + #endif + "}); + cx.simulate_shared_keystrokes("j % % %").await; + cx.shared_state().await.assert_eq(indoc! 
{r" + #if + ˇ#if + + #else + + #endif + + #else + #endif + "}); + } + #[gpui::test] async fn test_unmatched_forward(cx: &mut gpui::TestAppContext) { let mut cx = NeovimBackedTestContext::new(cx).await; diff --git a/crates/vim/src/normal.rs b/crates/vim/src/normal.rs index 118805586118e36269a1f0c1d1d619058133da30..b54a0262744afddbefbd3d4ce5a737dfe3ee7502 100644 --- a/crates/vim/src/normal.rs +++ b/crates/vim/src/normal.rs @@ -932,7 +932,7 @@ impl Vim { Vim::take_forced_motion(cx); self.update_editor(cx, |vim, editor, cx| { let selection = editor.selections.newest_anchor(); - let Some((buffer, point, _)) = editor + let Some((buffer, point)) = editor .buffer() .read(cx) .point_to_buffer_point(selection.head(), cx) diff --git a/crates/vim/src/normal/search.rs b/crates/vim/src/normal/search.rs index 6a8394f44710b7e241b7ba38f4913899a5afbce6..22c453c877ec89fdbf432d19d89167285b78b12f 100644 --- a/crates/vim/src/normal/search.rs +++ b/crates/vim/src/normal/search.rs @@ -245,7 +245,7 @@ impl Vim { search_bar.set_replacement(None, cx); let mut options = SearchOptions::NONE; - if action.regex { + if action.regex && VimSettings::get_global(cx).use_regex_search { options |= SearchOptions::REGEX; } if action.backwards { @@ -1446,4 +1446,66 @@ mod test { // The cursor should be at the match location on line 3 (row 2). 
cx.assert_state("hello world\nfoo bar\nhello ˇagain\n", Mode::Normal); } + + #[gpui::test] + async fn test_vim_search_respects_search_settings(cx: &mut gpui::TestAppContext) { + let mut cx = VimTestContext::new(cx, true).await; + + cx.update_global(|store: &mut SettingsStore, cx| { + store.update_user_settings(cx, |settings| { + settings.vim.get_or_insert_default().use_regex_search = Some(false); + }); + }); + + cx.set_state("ˇcontent", Mode::Normal); + cx.simulate_keystrokes("/"); + cx.run_until_parked(); + + // Verify search options are set from settings + let search_bar = cx.workspace(|workspace, _, cx| { + workspace + .active_pane() + .read(cx) + .toolbar() + .read(cx) + .item_of_type::() + .expect("Buffer search bar should be active") + }); + + cx.update_entity(search_bar, |bar, _window, _cx| { + assert!( + !bar.has_search_option(search::SearchOptions::REGEX), + "Vim search open without regex mode" + ); + }); + + cx.simulate_keystrokes("escape"); + cx.run_until_parked(); + + cx.update_global(|store: &mut SettingsStore, cx| { + store.update_user_settings(cx, |settings| { + settings.vim.get_or_insert_default().use_regex_search = Some(true); + }); + }); + + cx.simulate_keystrokes("/"); + cx.run_until_parked(); + + let search_bar = cx.workspace(|workspace, _, cx| { + workspace + .active_pane() + .read(cx) + .toolbar() + .read(cx) + .item_of_type::() + .expect("Buffer search bar should be active") + }); + + cx.update_entity(search_bar, |bar, _window, _cx| { + assert!( + bar.has_search_option(search::SearchOptions::REGEX), + "Vim search opens with regex mode" + ); + }); + } } diff --git a/crates/vim/src/object.rs b/crates/vim/src/object.rs index 1c96ba74b455c5d94e53a0ab9c78cd3ae8af5b3c..67b4b16b178e75316eb10b051ab9153737777e3f 100644 --- a/crates/vim/src/object.rs +++ b/crates/vim/src/object.rs @@ -203,33 +203,24 @@ fn find_mini_delimiters( is_valid_delimiter: &DelimiterPredicate, ) -> Option> { let point = map.clip_at_line_end(display_point).to_point(map); - let 
offset = point.to_offset(&map.buffer_snapshot()); + let offset = map.buffer_snapshot().point_to_offset(point); let line_range = get_line_range(map, point); let visible_line_range = get_visible_line_range(&line_range); let snapshot = &map.buffer_snapshot(); - let mut excerpt = snapshot.excerpt_containing(offset..offset)?; - let buffer = excerpt.buffer(); - let buffer_offset = excerpt.map_offset_to_buffer(offset); - let bracket_filter = |open: Range, close: Range| { - is_valid_delimiter(buffer, open.start, close.start) - }; - - // Try to find delimiters in visible range first let ranges = map .buffer_snapshot() .bracket_ranges(visible_line_range) .map(|ranges| { ranges.filter_map(|(open, close)| { - // Convert the ranges from multibuffer space to buffer space as - // that is what `is_valid_delimiter` expects, otherwise it might - // panic as the values might be out of bounds. - let buffer_open = excerpt.map_range_to_buffer(open.clone()); - let buffer_close = excerpt.map_range_to_buffer(close.clone()); + let (buffer, buffer_open) = + snapshot.range_to_buffer_range::(open.clone())?; + let (_, buffer_close) = + snapshot.range_to_buffer_range::(close.clone())?; - if is_valid_delimiter(buffer, buffer_open.start.0, buffer_close.start.0) { + if is_valid_delimiter(buffer, buffer_open.start, buffer_close.start) { Some((open, close)) } else { None @@ -247,18 +238,31 @@ fn find_mini_delimiters( ); } - // Fall back to innermost enclosing brackets - let (open_bracket, close_bracket) = buffer - .innermost_enclosing_bracket_ranges(buffer_offset..buffer_offset, Some(&bracket_filter))?; + let results = snapshot.map_excerpt_ranges(offset..offset, |buffer, _, input_range| { + let buffer_offset = input_range.start.0; + let bracket_filter = |open: Range, close: Range| { + is_valid_delimiter(buffer, open.start, close.start) + }; + let Some((open, close)) = buffer.innermost_enclosing_bracket_ranges( + buffer_offset..buffer_offset, + Some(&bracket_filter), + ) else { + return vec![]; + }; + 
vec![ + (BufferOffset(open.start)..BufferOffset(open.end), ()), + (BufferOffset(close.start)..BufferOffset(close.end), ()), + ] + })?; + + if results.len() < 2 { + return None; + } Some( DelimiterRange { - open: excerpt.map_range_from_buffer( - BufferOffset(open_bracket.start)..BufferOffset(open_bracket.end), - ), - close: excerpt.map_range_from_buffer( - BufferOffset(close_bracket.start)..BufferOffset(close_bracket.end), - ), + open: results[0].0.clone(), + close: results[1].0.clone(), } .to_display_range(map, around), ) @@ -935,61 +939,64 @@ pub fn surrounding_html_tag( } let snapshot = &map.buffer_snapshot(); - let offset = head.to_offset(map, Bias::Left); - let mut excerpt = snapshot.excerpt_containing(offset..offset)?; - let buffer = excerpt.buffer(); - let offset = excerpt.map_offset_to_buffer(offset); - - // Find the most closest to current offset - let mut cursor = buffer.syntax_layer_at(offset)?.node().walk(); - let mut last_child_node = cursor.node(); - while cursor.goto_first_child_for_byte(offset.0).is_some() { - last_child_node = cursor.node(); - } - - let mut last_child_node = Some(last_child_node); - while let Some(cur_node) = last_child_node { - if cur_node.child_count() >= 2 { - let first_child = cur_node.child(0); - let last_child = cur_node.child(cur_node.child_count() as u32 - 1); - if let (Some(first_child), Some(last_child)) = (first_child, last_child) { - let open_tag = open_tag(buffer.chars_for_range(first_child.byte_range())); - let close_tag = close_tag(buffer.chars_for_range(last_child.byte_range())); - // It needs to be handled differently according to the selection length - let is_valid = if range.end.to_offset(map, Bias::Left) - - range.start.to_offset(map, Bias::Left) - <= 1 - { - offset.0 <= last_child.end_byte() - } else { - excerpt - .map_offset_to_buffer(range.start.to_offset(map, Bias::Left)) - .0 - >= first_child.start_byte() - && excerpt - .map_offset_to_buffer(range.end.to_offset(map, Bias::Left)) - .0 - <= 
last_child.start_byte() + 1 - }; - if open_tag.is_some() && open_tag == close_tag && is_valid { - let range = if around { - first_child.byte_range().start..last_child.byte_range().end - } else { - first_child.byte_range().end..last_child.byte_range().start - }; - let range = BufferOffset(range.start)..BufferOffset(range.end); - if excerpt.contains_buffer_range(range.clone()) { - let result = excerpt.map_range_from_buffer(range); - return Some( - result.start.to_display_point(map)..result.end.to_display_point(map), - ); + let head_offset = head.to_offset(map, Bias::Left); + let range_start = range.start.to_offset(map, Bias::Left); + let range_end = range.end.to_offset(map, Bias::Left); + let head_is_start = head_offset <= range_start; + + let results = snapshot.map_excerpt_ranges( + range_start..range_end, + |buffer, _excerpt_range, input_buffer_range| { + let buffer_offset = if head_is_start { + input_buffer_range.start + } else { + input_buffer_range.end + }; + + let Some(layer) = buffer.syntax_layer_at(buffer_offset) else { + return Vec::new(); + }; + let mut cursor = layer.node().walk(); + let mut last_child_node = cursor.node(); + while cursor.goto_first_child_for_byte(buffer_offset.0).is_some() { + last_child_node = cursor.node(); + } + + let mut last_child_node = Some(last_child_node); + while let Some(cur_node) = last_child_node { + if cur_node.child_count() >= 2 { + let first_child = cur_node.child(0); + let last_child = cur_node.child(cur_node.child_count() as u32 - 1); + if let (Some(first_child), Some(last_child)) = (first_child, last_child) { + let open_tag = open_tag(buffer.chars_for_range(first_child.byte_range())); + let close_tag = close_tag(buffer.chars_for_range(last_child.byte_range())); + let is_valid = if range_end.saturating_sub(range_start) <= 1 { + buffer_offset.0 <= last_child.end_byte() + } else { + input_buffer_range.start.0 >= first_child.start_byte() + && input_buffer_range.end.0 <= last_child.start_byte() + 1 + }; + if 
open_tag.is_some() && open_tag == close_tag && is_valid { + let buffer_range = if around { + first_child.byte_range().start..last_child.byte_range().end + } else { + first_child.byte_range().end..last_child.byte_range().start + }; + return vec![( + BufferOffset(buffer_range.start)..BufferOffset(buffer_range.end), + (), + )]; + } } } + last_child_node = cur_node.parent(); } - } - last_child_node = cur_node.parent(); - } - None + Vec::new() + }, + )?; + + let (result, ()) = results.into_iter().next()?; + Some(result.start.to_display_point(map)..result.end.to_display_point(map)) } /// Returns a range that surrounds the word and following whitespace @@ -1163,44 +1170,55 @@ fn text_object( let snapshot = &map.buffer_snapshot(); let offset = relative_to.to_offset(map, Bias::Left); - let mut excerpt = snapshot.excerpt_containing(offset..offset)?; - let buffer = excerpt.buffer(); - let offset = excerpt.map_offset_to_buffer(offset); - - let mut matches: Vec> = buffer - .text_object_ranges(offset..offset, TreeSitterOptions::default()) - .filter_map(|(r, m)| if m == target { Some(r) } else { None }) - .collect(); - matches.sort_by_key(|r| r.end - r.start); - if let Some(buffer_range) = matches.first() { - let buffer_range = BufferOffset(buffer_range.start)..BufferOffset(buffer_range.end); - let range = excerpt.map_range_from_buffer(buffer_range); - return Some(range.start.to_display_point(map)..range.end.to_display_point(map)); - } - - let around = target.around()?; - let mut matches: Vec> = buffer - .text_object_ranges(offset..offset, TreeSitterOptions::default()) - .filter_map(|(r, m)| if m == around { Some(r) } else { None }) - .collect(); - matches.sort_by_key(|r| r.end - r.start); - let around_range = matches.first()?; - - let mut matches: Vec> = buffer - .text_object_ranges(around_range.clone(), TreeSitterOptions::default()) - .filter_map(|(r, m)| if m == target { Some(r) } else { None }) - .collect(); - matches.sort_by_key(|r| r.start); - if let Some(buffer_range) = 
matches.first() - && !buffer_range.is_empty() - { - let buffer_range = BufferOffset(buffer_range.start)..BufferOffset(buffer_range.end); - let range = excerpt.map_range_from_buffer(buffer_range); - return Some(range.start.to_display_point(map)..range.end.to_display_point(map)); - } - let around_range = BufferOffset(around_range.start)..BufferOffset(around_range.end); - let buffer_range = excerpt.map_range_from_buffer(around_range); - return Some(buffer_range.start.to_display_point(map)..buffer_range.end.to_display_point(map)); + let results = + snapshot.map_excerpt_ranges(offset..offset, |buffer, _excerpt_range, buffer_range| { + let buffer_offset = buffer_range.start; + + let mut matches: Vec> = buffer + .text_object_ranges(buffer_offset..buffer_offset, TreeSitterOptions::default()) + .filter_map(|(r, m)| if m == target { Some(r) } else { None }) + .collect(); + matches.sort_by_key(|r| r.end - r.start); + if let Some(buffer_range) = matches.first() { + return vec![( + BufferOffset(buffer_range.start)..BufferOffset(buffer_range.end), + (), + )]; + } + + let Some(around) = target.around() else { + return vec![]; + }; + let mut matches: Vec> = buffer + .text_object_ranges(buffer_offset..buffer_offset, TreeSitterOptions::default()) + .filter_map(|(r, m)| if m == around { Some(r) } else { None }) + .collect(); + matches.sort_by_key(|r| r.end - r.start); + let Some(around_range) = matches.first() else { + return vec![]; + }; + + let mut matches: Vec> = buffer + .text_object_ranges(around_range.clone(), TreeSitterOptions::default()) + .filter_map(|(r, m)| if m == target { Some(r) } else { None }) + .collect(); + matches.sort_by_key(|r| r.start); + if let Some(buffer_range) = matches.first() + && !buffer_range.is_empty() + { + return vec![( + BufferOffset(buffer_range.start)..BufferOffset(buffer_range.end), + (), + )]; + } + vec![( + BufferOffset(around_range.start)..BufferOffset(around_range.end), + (), + )] + })?; + + let (range, ()) = results.into_iter().next()?; + 
Some(range.start.to_display_point(map)..range.end.to_display_point(map)) } fn argument( @@ -1211,16 +1229,11 @@ fn argument( let snapshot = &map.buffer_snapshot(); let offset = relative_to.to_offset(map, Bias::Left); - // The `argument` vim text object uses the syntax tree, so we operate at the buffer level and map back to the display level - let mut excerpt = snapshot.excerpt_containing(offset..offset)?; - let buffer = excerpt.buffer(); - fn comma_delimited_range_at( buffer: &BufferSnapshot, mut offset: BufferOffset, include_comma: bool, ) -> Option> { - // Seek to the first non-whitespace character offset += buffer .chars_at(offset) .take_while(|c| c.is_whitespace()) @@ -1228,25 +1241,20 @@ fn argument( .sum::(); let bracket_filter = |open: Range, close: Range| { - // Filter out empty ranges if open.end == close.start { return false; } - // If the cursor is outside the brackets, ignore them if open.start == offset.0 || close.end == offset.0 { return false; } - // TODO: Is there any better way to filter out string brackets? - // Used to filter out string brackets matches!( buffer.chars_at(open.start).next(), Some('(' | '[' | '{' | '<' | '|') ) }; - // Find the brackets containing the cursor let (open_bracket, close_bracket) = buffer.innermost_enclosing_bracket_ranges(offset..offset, Some(&bracket_filter))?; @@ -1256,7 +1264,6 @@ fn argument( let node = layer.node(); let mut cursor = node.walk(); - // Loop until we find the smallest node whose parent covers the bracket range. This node is the argument in the parent argument list let mut parent_covers_bracket_range = false; loop { let node = cursor.node(); @@ -1268,20 +1275,17 @@ fn argument( } parent_covers_bracket_range = covers_bracket_range; - // Unable to find a child node with a parent that covers the bracket range, so no argument to select cursor.goto_first_child_for_byte(offset.0)?; } let mut argument_node = cursor.node(); - // If the child node is the open bracket, move to the next sibling. 
if argument_node.byte_range() == open_bracket { if !cursor.goto_next_sibling() { return Some(inner_bracket_range); } argument_node = cursor.node(); } - // While the child node is the close bracket or a comma, move to the previous sibling while argument_node.byte_range() == close_bracket || argument_node.kind() == "," { if !cursor.goto_previous_sibling() { return Some(inner_bracket_range); @@ -1292,14 +1296,11 @@ fn argument( } } - // The start and end of the argument range, defaulting to the start and end of the argument node let mut start = argument_node.start_byte(); let mut end = argument_node.end_byte(); let mut needs_surrounding_comma = include_comma; - // Seek backwards to find the start of the argument - either the previous comma or the opening bracket. - // We do this because multiple nodes can represent a single argument, such as with rust `vec![a.b.c, d.e.f]` while cursor.goto_previous_sibling() { let prev = cursor.node(); @@ -1317,7 +1318,6 @@ fn argument( } } - // Do the same for the end of the argument, extending to next comma or the end of the argument list while cursor.goto_next_sibling() { let next = cursor.node(); @@ -1326,7 +1326,6 @@ fn argument( break; } else if next.kind() == "," { if needs_surrounding_comma { - // Select up to the beginning of the next argument if there is one, otherwise to the end of the comma if let Some(next_arg) = next.next_sibling() { end = next_arg.start_byte(); } else { @@ -1342,14 +1341,17 @@ fn argument( Some(BufferOffset(start)..BufferOffset(end)) } - let result = comma_delimited_range_at(buffer, excerpt.map_offset_to_buffer(offset), around)?; + let results = + snapshot.map_excerpt_ranges(offset..offset, |buffer, _excerpt_range, buffer_range| { + let buffer_offset = buffer_range.start; + match comma_delimited_range_at(buffer, buffer_offset, around) { + Some(result) => vec![(result, ())], + None => vec![], + } + })?; - if excerpt.contains_buffer_range(result.clone()) { - let result = 
excerpt.map_range_from_buffer(result); - Some(result.start.to_display_point(map)..result.end.to_display_point(map)) - } else { - None - } + let (range, ()) = results.into_iter().next()?; + Some(range.start.to_display_point(map)..range.end.to_display_point(map)) } fn indent( @@ -3369,7 +3371,12 @@ mod test { // but, since this is being set manually, the language isn't // automatically set. let editor = Editor::new(EditorMode::full(), multi_buffer.clone(), None, window, cx); - let buffer_ids = multi_buffer.read(cx).excerpt_buffer_ids(); + let buffer_ids = multi_buffer + .read(cx) + .snapshot(cx) + .excerpts() + .map(|excerpt| excerpt.context.start.buffer_id) + .collect::>(); if let Some(buffer) = multi_buffer.read(cx).buffer(buffer_ids[1]) { buffer.update(cx, |buffer, cx| { buffer.set_language(Some(language::rust_lang()), cx); diff --git a/crates/vim/src/state.rs b/crates/vim/src/state.rs index 2fa5382c542999b8d3cb53ea85bed4c99257a3ea..4dd557199ab9aebe0a2b26438bdaa0e321a956b2 100644 --- a/crates/vim/src/state.rs +++ b/crates/vim/src/state.rs @@ -426,7 +426,7 @@ impl MarksState { name.clone(), buffer .read(cx) - .summaries_for_anchors::(anchors) + .summaries_for_anchors::(anchors.iter().copied()) .collect(), ) }) @@ -492,7 +492,14 @@ impl MarksState { { let buffer_marks = old_marks .into_iter() - .map(|(k, v)| (k, v.into_iter().map(|anchor| anchor.text_anchor).collect())) + .map(|(k, v)| { + ( + k, + v.into_iter() + .filter_map(|anchor| anchor.raw_text_anchor()) + .collect(), + ) + }) .collect(); self.buffer_marks .insert(buffer.read(cx).remote_id(), buffer_marks); @@ -569,6 +576,7 @@ impl MarksState { anchors: Vec, cx: &mut Context, ) { + let multibuffer_snapshot = multibuffer.read(cx).snapshot(cx); let buffer = multibuffer.read(cx).as_singleton(); let abs_path = buffer.as_ref().and_then(|b| self.path_for_buffer(b, cx)); @@ -602,7 +610,7 @@ impl MarksState { name.clone(), anchors .into_iter() - .map(|anchor| anchor.text_anchor) + .filter_map(|anchor| 
Some(multibuffer_snapshot.anchor_to_buffer_anchor(anchor)?.0)) .collect(), ); if !self.watched_buffers.contains_key(&buffer_id) { @@ -629,12 +637,13 @@ impl MarksState { return Some(Mark::Local(anchors.get(name)?.clone())); } - let (excerpt_id, buffer_id, _) = multi_buffer.read(cx).read(cx).as_singleton()?; - if let Some(anchors) = self.buffer_marks.get(&buffer_id) { + let multibuffer_snapshot = multi_buffer.read(cx).snapshot(cx); + let buffer_snapshot = multibuffer_snapshot.as_singleton()?; + if let Some(anchors) = self.buffer_marks.get(&buffer_snapshot.remote_id()) { let text_anchors = anchors.get(name)?; let anchors = text_anchors .iter() - .map(|anchor| Anchor::in_buffer(excerpt_id, *anchor)) + .filter_map(|anchor| multibuffer_snapshot.anchor_in_excerpt(*anchor)) .collect(); return Some(Mark::Local(anchors)); } @@ -895,14 +904,13 @@ impl VimGlobals { } } '%' => editor.and_then(|editor| { - let selection = editor - .selections - .newest::(&editor.display_snapshot(cx)); - if let Some((_, buffer, _)) = editor - .buffer() - .read(cx) - .excerpt_containing(selection.head(), cx) - { + let multibuffer = editor.buffer().read(cx); + let snapshot = multibuffer.snapshot(cx); + let selection = editor.selections.newest_anchor(); + let buffer = snapshot + .anchor_to_buffer_anchor(selection.head()) + .and_then(|(text_anchor, _)| multibuffer.buffer(text_anchor.buffer_id)); + if let Some(buffer) = buffer { buffer .read(cx) .file() diff --git a/crates/vim/src/test.rs b/crates/vim/src/test.rs index 2d0ec4f69a0aaa93b191933565b9db27d8fb3198..961729e0e24a66a624e30ca7c72bfe5f13e10bca 100644 --- a/crates/vim/src/test.rs +++ b/crates/vim/src/test.rs @@ -2117,7 +2117,12 @@ async fn test_folded_multibuffer_excerpts(cx: &mut gpui::TestAppContext) { ); let mut editor = Editor::new(EditorMode::full(), multi_buffer.clone(), None, window, cx); - let buffer_ids = multi_buffer.read(cx).excerpt_buffer_ids(); + let buffer_ids = multi_buffer + .read(cx) + .snapshot(cx) + .excerpts() + 
.map(|excerpt| excerpt.context.start.buffer_id) + .collect::>(); // fold all but the second buffer, so that we test navigating between two // adjacent folded buffers, as well as folded buffers at the start and // end the multibuffer @@ -2262,7 +2267,13 @@ async fn test_folded_multibuffer_excerpts(cx: &mut gpui::TestAppContext) { " }); cx.update_editor(|editor, _, cx| { - let buffer_ids = editor.buffer().read(cx).excerpt_buffer_ids(); + let buffer_ids = editor + .buffer() + .read(cx) + .snapshot(cx) + .excerpts() + .map(|excerpt| excerpt.context.start.buffer_id) + .collect::>(); editor.fold_buffer(buffer_ids[1], cx); }); diff --git a/crates/vim/src/test/vim_test_context.rs b/crates/vim/src/test/vim_test_context.rs index 510d218df050455d0df0f9c2b7b782a651694cd7..6f15450aa3f70593c6877c293fecb765978e065d 100644 --- a/crates/vim/src/test/vim_test_context.rs +++ b/crates/vim/src/test/vim_test_context.rs @@ -109,12 +109,12 @@ impl VimTestContext { } cx.bind_keys(default_key_bindings); if enabled { - let vim_key_bindings = settings::KeymapFile::load_asset( - "keymaps/vim.json", - Some(settings::KeybindSource::Vim), - cx, - ) - .unwrap(); + let mut vim_key_bindings = + settings::KeymapFile::load_asset_allow_partial_failure("keymaps/vim.json", cx) + .unwrap(); + for key_binding in &mut vim_key_bindings { + key_binding.set_meta(settings::KeybindSource::Vim.meta()); + } cx.bind_keys(vim_key_bindings); } } diff --git a/crates/vim/src/vim.rs b/crates/vim/src/vim.rs index 6e1849340f17b776a34546dd9a118dc55e8dab84..a66111cae1576744c4c51d717984d67c12fc8235 100644 --- a/crates/vim/src/vim.rs +++ b/crates/vim/src/vim.rs @@ -2141,6 +2141,7 @@ struct VimSettings { pub toggle_relative_line_numbers: bool, pub use_system_clipboard: settings::UseSystemClipboard, pub use_smartcase_find: bool, + pub use_regex_search: bool, pub gdefault: bool, pub custom_digraphs: HashMap>, pub highlight_on_yank_duration: u64, @@ -2227,6 +2228,7 @@ impl Settings for VimSettings { toggle_relative_line_numbers: 
vim.toggle_relative_line_numbers.unwrap(), use_system_clipboard: vim.use_system_clipboard.unwrap(), use_smartcase_find: vim.use_smartcase_find.unwrap(), + use_regex_search: vim.use_regex_search.unwrap(), gdefault: vim.gdefault.unwrap(), custom_digraphs: vim.custom_digraphs.unwrap(), highlight_on_yank_duration: vim.highlight_on_yank_duration.unwrap(), diff --git a/crates/vim/test_data/test_matching_comments.json b/crates/vim/test_data/test_matching_comments.json new file mode 100644 index 0000000000000000000000000000000000000000..7fcf5e46e1ea16f2be794ff76b583242b33aabc0 --- /dev/null +++ b/crates/vim/test_data/test_matching_comments.json @@ -0,0 +1,10 @@ +{"Put":{"state":"ˇ/*\n this is a comment\n*/"}} +{"Key":"%"} +{"Get":{"state":"/*\n this is a comment\nˇ*/","mode":"Normal"}} +{"Key":"%"} +{"Get":{"state":"ˇ/*\n this is a comment\n*/","mode":"Normal"}} +{"Key":"%"} +{"Get":{"state":"/*\n this is a comment\nˇ*/","mode":"Normal"}} +{"Put":{"state":"ˇ// comment"}} +{"Key":"%"} +{"Get":{"state":"ˇ// comment","mode":"Normal"}} diff --git a/crates/vim/test_data/test_matching_preprocessor_directives.json b/crates/vim/test_data/test_matching_preprocessor_directives.json new file mode 100644 index 0000000000000000000000000000000000000000..9f0bd9792ee8dad5029f4ecaf325c231755530e1 --- /dev/null +++ b/crates/vim/test_data/test_matching_preprocessor_directives.json @@ -0,0 +1,18 @@ +{"Put":{"state":"#ˇif\n\n#else\n\n#endif\n"}} +{"Key":"%"} +{"Get":{"state":"#if\n\nˇ#else\n\n#endif\n","mode":"Normal"}} +{"Key":"%"} +{"Get":{"state":"#if\n\n#else\n\nˇ#endif\n","mode":"Normal"}} +{"Key":"%"} +{"Get":{"state":"ˇ#if\n\n#else\n\n#endif\n","mode":"Normal"}} +{"Put":{"state":"#ˇif\n #if\n\n #else\n\n #endif\n\n#else\n#endif\n"}} +{"Key":"%"} +{"Get":{"state":"#if\n #if\n\n #else\n\n #endif\n\nˇ#else\n#endif\n","mode":"Normal"}} +{"Key":"%"} +{"Key":"%"} +{"Get":{"state":"ˇ#if\n #if\n\n #else\n\n #endif\n\n#else\n#endif\n","mode":"Normal"}} +{"Key":"j"} +{"Key":"%"} +{"Key":"%"} 
+{"Key":"%"} +{"Get":{"state":"#if\n ˇ#if\n\n #else\n\n #endif\n\n#else\n#endif\n","mode":"Normal"}} diff --git a/crates/web_search_providers/src/cloud.rs b/crates/web_search_providers/src/cloud.rs index 17addd24d445a666138a1b37fef872beedd07aed..11227d8fb5c7152dc5b7e03b95fadea6cb714717 100644 --- a/crates/web_search_providers/src/cloud.rs +++ b/crates/web_search_providers/src/cloud.rs @@ -1,13 +1,13 @@ use std::sync::Arc; use anyhow::{Context as _, Result}; -use client::{Client, UserStore}; +use client::{Client, NeedsLlmTokenRefresh, UserStore, global_llm_token}; use cloud_api_types::OrganizationId; use cloud_llm_client::{WebSearchBody, WebSearchResponse}; use futures::AsyncReadExt as _; use gpui::{App, AppContext, Context, Entity, Task}; use http_client::{HttpClient, Method}; -use language_model::{LlmApiToken, NeedsLlmTokenRefresh}; +use language_model::LlmApiToken; use web_search::{WebSearchProvider, WebSearchProviderId}; pub struct CloudWebSearchProvider { @@ -30,7 +30,7 @@ pub struct State { impl State { pub fn new(client: Arc, user_store: Entity, cx: &mut Context) -> Self { - let llm_api_token = LlmApiToken::global(cx); + let llm_api_token = global_llm_token(cx); Self { client, @@ -73,8 +73,8 @@ async fn perform_web_search( let http_client = &client.http_client(); let mut retries_remaining = MAX_RETRIES; - let mut token = llm_api_token - .acquire(&client, organization_id.clone()) + let mut token = client + .acquire_llm_token(&llm_api_token, organization_id.clone()) .await?; loop { @@ -100,8 +100,8 @@ async fn perform_web_search( response.body_mut().read_to_string(&mut body).await?; return Ok(serde_json::from_str(&body)?); } else if response.needs_llm_token_refresh() { - token = llm_api_token - .refresh(&client, organization_id.clone()) + token = client + .refresh_llm_token(&llm_api_token, organization_id.clone()) .await?; retries_remaining -= 1; } else { diff --git a/crates/workspace/src/dock.rs b/crates/workspace/src/dock.rs index 
e36b48f06fd3ca0983b13ddb564af08ddab9fba5..e58b4b59100c05085c93993370b85a788fc159ca 100644 --- a/crates/workspace/src/dock.rs +++ b/crates/workspace/src/dock.rs @@ -1,5 +1,6 @@ +use crate::focus_follows_mouse::FocusFollowsMouse as _; use crate::persistence::model::DockData; -use crate::{DraggedDock, Event, ModalLayer, Pane}; +use crate::{DraggedDock, Event, FocusFollowsMouse, ModalLayer, Pane, WorkspaceSettings}; use crate::{Workspace, status_bar::StatusItemView}; use anyhow::Context as _; use client::proto; @@ -12,7 +13,7 @@ use gpui::{ px, }; use serde::{Deserialize, Serialize}; -use settings::SettingsStore; +use settings::{Settings, SettingsStore}; use std::sync::Arc; use ui::{ ContextMenu, CountBadge, Divider, DividerColor, IconButton, Tooltip, prelude::*, @@ -252,6 +253,7 @@ pub struct Dock { is_open: bool, active_panel_index: Option, focus_handle: FocusHandle, + focus_follows_mouse: FocusFollowsMouse, pub(crate) serialized_dock: Option, zoom_layer_open: bool, modal_layer: Entity, @@ -376,6 +378,7 @@ impl Dock { active_panel_index: None, is_open: false, focus_handle: focus_handle.clone(), + focus_follows_mouse: WorkspaceSettings::get_global(cx).focus_follows_mouse, _subscriptions: [focus_subscription, zoom_subscription], serialized_dock: None, zoom_layer_open: false, @@ -1086,8 +1089,10 @@ impl Render for Dock { }; div() + .id("dock-panel") .key_context(dispatch_context) .track_focus(&self.focus_handle(cx)) + .focus_follows_mouse(self.focus_follows_mouse, cx) .flex() .bg(cx.theme().colors().panel_background) .border_color(cx.theme().colors().border) @@ -1121,6 +1126,7 @@ impl Render for Dock { }) } else { div() + .id("dock-panel") .key_context(dispatch_context) .track_focus(&self.focus_handle(cx)) } diff --git a/crates/workspace/src/focus_follows_mouse.rs b/crates/workspace/src/focus_follows_mouse.rs new file mode 100644 index 0000000000000000000000000000000000000000..da433cefcf059960181c190da83b06260651b063 --- /dev/null +++ 
b/crates/workspace/src/focus_follows_mouse.rs @@ -0,0 +1,71 @@ +use gpui::{ + AnyWindowHandle, AppContext as _, Context, FocusHandle, Focusable, Global, + StatefulInteractiveElement, Task, +}; + +use crate::workspace_settings; + +#[derive(Default)] +struct FfmState { + // The window and element to be focused + handles: Option<(AnyWindowHandle, FocusHandle)>, + // The debounced task which will do the focusing + _debounce_task: Option>, +} + +impl Global for FfmState {} + +pub trait FocusFollowsMouse: StatefulInteractiveElement { + fn focus_follows_mouse( + self, + settings: workspace_settings::FocusFollowsMouse, + cx: &Context, + ) -> Self { + if settings.enabled { + self.on_hover(cx.listener(move |this, enter, window, cx| { + if *enter { + let window_handle = window.window_handle(); + let focus_handle = this.focus_handle(cx); + + let state = cx.try_global::(); + + // Only replace the target if the new handle doesn't contain the existing one. + // This ensures that hovering over a parent (e.g., Dock) doesn't override + // a more specific child target (e.g., a Pane inside the Dock). 
+ let should_replace = state + .and_then(|s| s.handles.as_ref()) + .map(|(_, existing)| !focus_handle.contains(existing, window)) + .unwrap_or(true); + + if !should_replace { + return; + } + + let debounce_task = cx.spawn(async move |_this, cx| { + cx.background_executor().timer(settings.debounce).await; + + cx.update(|cx| { + let state = cx.default_global::(); + let Some((window, focus)) = state.handles.take() else { + return; + }; + + let _ = cx.update_window(window, move |_view, window, cx| { + window.focus(&focus, cx); + }); + }); + }); + + cx.set_global(FfmState { + handles: Some((window_handle, focus_handle)), + _debounce_task: Some(debounce_task), + }); + } + })) + } else { + self + } + } +} + +impl FocusFollowsMouse for T {} diff --git a/crates/workspace/src/item.rs b/crates/workspace/src/item.rs index ed104a534eba7707a04a60775ae08820c4f258b8..64647419e300357e360e3ac3f535d8bbcd076711 100644 --- a/crates/workspace/src/item.rs +++ b/crates/workspace/src/item.rs @@ -9,7 +9,7 @@ use crate::{ }; use anyhow::Result; use client::{Client, proto}; -use futures::{StreamExt, channel::mpsc}; +use futures::channel::mpsc; use gpui::{ Action, AnyElement, AnyEntity, AnyView, App, AppContext, Context, Entity, EntityId, EventEmitter, FocusHandle, Focusable, Font, Pixels, Point, Render, SharedString, Task, @@ -777,8 +777,8 @@ impl ItemHandle for Entity { send_follower_updates = Some(cx.spawn_in(window, { let pending_update = pending_update.clone(); async move |workspace, cx| { - while let Some(mut leader_id) = pending_update_rx.next().await { - while let Ok(Some(id)) = pending_update_rx.try_next() { + while let Ok(mut leader_id) = pending_update_rx.recv().await { + while let Ok(id) = pending_update_rx.try_recv() { leader_id = id; } diff --git a/crates/workspace/src/multi_workspace.rs b/crates/workspace/src/multi_workspace.rs index 10a5ce70ead2d5aea7cc21a9af53ee9f216859c3..a61ad3576c57ecd8b1811363d6b5607ead737821 100644 --- a/crates/workspace/src/multi_workspace.rs +++ 
b/crates/workspace/src/multi_workspace.rs @@ -1,20 +1,21 @@ use anyhow::Result; use feature_flags::{AgentV2FeatureFlag, FeatureFlagAppExt}; +use gpui::PathPromptOptions; use gpui::{ AnyView, App, Context, DragMoveEvent, Entity, EntityId, EventEmitter, FocusHandle, Focusable, ManagedView, MouseButton, Pixels, Render, Subscription, Task, Tiling, Window, WindowId, actions, deferred, px, }; -use project::DisableAiSettings; -#[cfg(any(test, feature = "test-support"))] -use project::Project; +use project::{DirectoryLister, DisableAiSettings, Project, ProjectGroupKey}; use settings::Settings; pub use settings::SidebarSide; use std::future::Future; +use std::path::Path; use std::path::PathBuf; use std::sync::Arc; use ui::prelude::*; use util::ResultExt; +use util::path_list::PathList; use zed_actions::agents_sidebar::{MoveWorkspaceToNewWindow, ToggleThreadSwitcher}; use agent_settings::AgentSettings; @@ -23,9 +24,11 @@ use ui::{ContextMenu, right_click_menu}; const SIDEBAR_RESIZE_HANDLE_SIZE: Pixels = px(6.0); +use crate::AppState; use crate::{ CloseIntent, CloseWindow, DockPosition, Event as WorkspaceEvent, Item, ModalView, OpenMode, Panel, Workspace, WorkspaceId, client_side_decorations, + persistence::model::MultiWorkspaceState, }; actions!( @@ -37,10 +40,7 @@ actions!( CloseWorkspaceSidebar, /// Moves focus to or from the workspace sidebar without closing it. FocusWorkspaceSidebar, - /// Switches to the next workspace. - NextWorkspace, - /// Switches to the previous workspace. - PreviousWorkspace, + //TODO: Restore next/previous workspace ] ); @@ -218,10 +218,58 @@ impl SidebarHandle for Entity { } } +/// Tracks which workspace the user is currently looking at. +/// +/// `Persistent` workspaces live in the `workspaces` vec and are shown in the +/// sidebar. `Transient` workspaces exist outside the vec and are discarded +/// when the user switches away. +enum ActiveWorkspace { + /// A persistent workspace, identified by index into the `workspaces` vec. 
+ Persistent(usize), + /// A workspace not in the `workspaces` vec that will be discarded on + /// switch or promoted to persistent when the sidebar is opened. + Transient(Entity), +} + +impl ActiveWorkspace { + fn persistent_index(&self) -> Option { + match self { + Self::Persistent(index) => Some(*index), + Self::Transient(_) => None, + } + } + + fn transient_workspace(&self) -> Option<&Entity> { + match self { + Self::Transient(workspace) => Some(workspace), + Self::Persistent(_) => None, + } + } + + /// Sets the active workspace to transient, returning the previous + /// transient workspace (if any). + fn set_transient(&mut self, workspace: Entity) -> Option> { + match std::mem::replace(self, Self::Transient(workspace)) { + Self::Transient(old) => Some(old), + Self::Persistent(_) => None, + } + } + + /// Sets the active workspace to persistent at the given index, + /// returning the previous transient workspace (if any). + fn set_persistent(&mut self, index: usize) -> Option> { + match std::mem::replace(self, Self::Persistent(index)) { + Self::Transient(workspace) => Some(workspace), + Self::Persistent(_) => None, + } + } +} + pub struct MultiWorkspace { window_id: WindowId, workspaces: Vec>, - active_workspace_index: usize, + active_workspace: ActiveWorkspace, + project_group_keys: Vec, sidebar: Option>, sidebar_open: bool, sidebar_overlay: Option, @@ -256,12 +304,15 @@ impl MultiWorkspace { } }); let quit_subscription = cx.on_app_quit(Self::app_will_quit); - let settings_subscription = - cx.observe_global_in::(window, |this, window, cx| { - if DisableAiSettings::get_global(cx).disable_ai && this.sidebar_open { - this.close_sidebar(window, cx); + let settings_subscription = cx.observe_global_in::(window, { + let mut previous_disable_ai = DisableAiSettings::get_global(cx).disable_ai; + move |this, window, cx| { + if DisableAiSettings::get_global(cx).disable_ai != previous_disable_ai { + this.collapse_to_single_workspace(window, cx); + previous_disable_ai = 
DisableAiSettings::get_global(cx).disable_ai; } - }); + } + }); Self::subscribe_to_workspace(&workspace, window, cx); let weak_self = cx.weak_entity(); workspace.update(cx, |workspace, cx| { @@ -269,8 +320,9 @@ impl MultiWorkspace { }); Self { window_id: window.window_handle().window_id(), - workspaces: vec![workspace], - active_workspace_index: 0, + project_group_keys: Vec::new(), + workspaces: Vec::new(), + active_workspace: ActiveWorkspace::Transient(workspace), sidebar: None, sidebar_open: false, sidebar_overlay: None, @@ -332,7 +384,7 @@ impl MultiWorkspace { return; } - if self.sidebar_open { + if self.sidebar_open() { self.close_sidebar(window, cx); } else { self.open_sidebar(cx); @@ -348,7 +400,7 @@ impl MultiWorkspace { return; } - if self.sidebar_open { + if self.sidebar_open() { self.close_sidebar(window, cx); } } @@ -358,7 +410,7 @@ impl MultiWorkspace { return; } - if self.sidebar_open { + if self.sidebar_open() { let sidebar_is_focused = self .sidebar .as_ref() @@ -383,8 +435,13 @@ impl MultiWorkspace { pub fn open_sidebar(&mut self, cx: &mut Context) { self.sidebar_open = true; + if let ActiveWorkspace::Transient(workspace) = &self.active_workspace { + let workspace = workspace.clone(); + let index = self.promote_transient(workspace, cx); + self.active_workspace = ActiveWorkspace::Persistent(index); + } let sidebar_focus_handle = self.sidebar.as_ref().map(|s| s.focus_handle(cx)); - for workspace in &self.workspaces { + for workspace in self.workspaces.iter() { workspace.update(cx, |workspace, _cx| { workspace.set_sidebar_focus_handle(sidebar_focus_handle.clone()); }); @@ -395,7 +452,7 @@ impl MultiWorkspace { pub fn close_sidebar(&mut self, window: &mut Window, cx: &mut Context) { self.sidebar_open = false; - for workspace in &self.workspaces { + for workspace in self.workspaces.iter() { workspace.update(cx, |workspace, _cx| { workspace.set_sidebar_focus_handle(None); }); @@ -410,7 +467,7 @@ impl MultiWorkspace { pub fn close_window(&mut self, _: 
&CloseWindow, window: &mut Window, cx: &mut Context) { cx.spawn_in(window, async move |this, cx| { let workspaces = this.update(cx, |multi_workspace, _cx| { - multi_workspace.workspaces().to_vec() + multi_workspace.workspaces().cloned().collect::>() })?; for workspace in workspaces { @@ -438,6 +495,20 @@ impl MultiWorkspace { window: &Window, cx: &mut Context, ) { + let project = workspace.read(cx).project().clone(); + cx.subscribe_in(&project, window, { + let workspace = workspace.downgrade(); + move |this, _project, event, _window, cx| match event { + project::Event::WorktreeAdded(_) | project::Event::WorktreeRemoved(_) => { + if let Some(workspace) = workspace.upgrade() { + this.add_project_group_key(workspace.read(cx).project_group_key(cx)); + } + } + _ => {} + } + }) + .detach(); + cx.subscribe_in(workspace, window, |this, workspace, event, window, cx| { if let WorkspaceEvent::Activate = event { this.activate(workspace.clone(), window, cx); @@ -446,98 +517,322 @@ impl MultiWorkspace { .detach(); } - pub fn workspace(&self) -> &Entity { - &self.workspaces[self.active_workspace_index] + pub fn add_project_group_key(&mut self, project_group_key: ProjectGroupKey) { + if project_group_key.path_list().paths().is_empty() { + return; + } + if self.project_group_keys.contains(&project_group_key) { + return; + } + self.project_group_keys.push(project_group_key); } - pub fn workspaces(&self) -> &[Entity] { - &self.workspaces + pub fn restore_project_group_keys(&mut self, keys: Vec) { + let mut restored = keys; + for existing_key in &self.project_group_keys { + if !restored.contains(existing_key) { + restored.push(existing_key.clone()); + } + } + self.project_group_keys = restored; } - pub fn active_workspace_index(&self) -> usize { - self.active_workspace_index + pub fn project_group_keys(&self) -> impl Iterator { + self.project_group_keys.iter() } - /// Adds a workspace to this window without changing which workspace is - /// active. 
- pub fn add(&mut self, workspace: Entity, window: &Window, cx: &mut Context) { - if !self.multi_workspace_enabled(cx) { - self.set_single_workspace(workspace, cx); - return; + /// Returns the project groups, ordered by most recently added. + pub fn project_groups( + &self, + cx: &App, + ) -> impl Iterator>)> { + let mut groups = self + .project_group_keys + .iter() + .rev() + .map(|key| (key.clone(), Vec::new())) + .collect::>(); + for workspace in &self.workspaces { + let key = workspace.read(cx).project_group_key(cx); + if let Some((_, workspaces)) = groups.iter_mut().find(|(k, _)| k == &key) { + workspaces.push(workspace.clone()); + } } + groups.into_iter() + } - self.insert_workspace(workspace, window, cx); + pub fn workspaces_for_project_group( + &self, + project_group_key: &ProjectGroupKey, + cx: &App, + ) -> impl Iterator> { + self.workspaces + .iter() + .filter(move |ws| ws.read(cx).project_group_key(cx) == *project_group_key) } - /// Ensures the workspace is in the multiworkspace and makes it the active one. 
- pub fn activate( + pub fn remove_folder_from_project_group( &mut self, - workspace: Entity, - window: &mut Window, + project_group_key: &ProjectGroupKey, + path: &Path, cx: &mut Context, ) { - if !self.multi_workspace_enabled(cx) { - self.set_single_workspace(workspace, cx); + let new_path_list = project_group_key.path_list().without_path(path); + if new_path_list.is_empty() { return; } - let index = self.insert_workspace(workspace, &*window, cx); - let changed = self.active_workspace_index != index; - self.active_workspace_index = index; - if changed { - cx.emit(MultiWorkspaceEvent::ActiveWorkspaceChanged); - self.serialize(cx); + let new_key = ProjectGroupKey::new(project_group_key.host(), new_path_list); + + let workspaces: Vec<_> = self + .workspaces_for_project_group(project_group_key, cx) + .cloned() + .collect(); + + self.add_project_group_key(new_key); + + for workspace in workspaces { + let project = workspace.read(cx).project().clone(); + project.update(cx, |project, cx| { + project.remove_worktree_for_main_worktree_path(path, cx); + }); } - self.focus_active_workspace(window, cx); + + self.serialize(cx); cx.notify(); } - /// Replaces the currently active workspace with a new one. If the - /// workspace is already in the list, this just switches to it. 
- pub fn replace( + pub fn prompt_to_add_folders_to_project_group( &mut self, - workspace: Entity, - window: &Window, + key: &ProjectGroupKey, + window: &mut Window, cx: &mut Context, ) { - if !self.multi_workspace_enabled(cx) { - self.set_single_workspace(workspace, cx); - return; + let paths = self.workspace().update(cx, |workspace, cx| { + workspace.prompt_for_open_path( + PathPromptOptions { + files: false, + directories: true, + multiple: true, + prompt: None, + }, + DirectoryLister::Project(workspace.project().clone()), + window, + cx, + ) + }); + + let key = key.clone(); + cx.spawn_in(window, async move |this, cx| { + if let Some(new_paths) = paths.await.ok().flatten() { + if !new_paths.is_empty() { + this.update(cx, |multi_workspace, cx| { + multi_workspace.add_folders_to_project_group(&key, new_paths, cx); + })?; + } + } + anyhow::Ok(()) + }) + .detach_and_log_err(cx); + } + + pub fn add_folders_to_project_group( + &mut self, + project_group_key: &ProjectGroupKey, + new_paths: Vec, + cx: &mut Context, + ) { + let mut all_paths: Vec = project_group_key.path_list().paths().to_vec(); + all_paths.extend(new_paths.iter().cloned()); + let new_path_list = PathList::new(&all_paths); + let new_key = ProjectGroupKey::new(project_group_key.host(), new_path_list); + + let workspaces: Vec<_> = self + .workspaces_for_project_group(project_group_key, cx) + .cloned() + .collect(); + + self.add_project_group_key(new_key); + + for workspace in workspaces { + let project = workspace.read(cx).project().clone(); + for path in &new_paths { + project + .update(cx, |project, cx| { + project.find_or_create_worktree(path, true, cx) + }) + .detach_and_log_err(cx); + } } - if let Some(index) = self.workspaces.iter().position(|w| *w == workspace) { - let changed = self.active_workspace_index != index; - self.active_workspace_index = index; - if changed { - cx.emit(MultiWorkspaceEvent::ActiveWorkspaceChanged); - self.serialize(cx); + self.serialize(cx); + cx.notify(); + } + + pub fn 
remove_project_group( + &mut self, + key: &ProjectGroupKey, + window: &mut Window, + cx: &mut Context, + ) { + self.project_group_keys.retain(|k| k != key); + + let workspaces: Vec<_> = self + .workspaces_for_project_group(key, cx) + .cloned() + .collect(); + for workspace in workspaces { + self.remove(&workspace, window, cx); + } + + self.serialize(cx); + cx.notify(); + } + + /// Finds an existing workspace in this multi-workspace whose paths match, + /// or creates a new one (deserializing its saved state from the database). + /// Never searches other windows or matches workspaces with a superset of + /// the requested paths. + pub fn find_or_create_local_workspace( + &mut self, + path_list: PathList, + window: &mut Window, + cx: &mut Context, + ) -> Task>> { + if let Some(workspace) = self + .workspaces + .iter() + .find(|ws| PathList::new(&ws.read(cx).root_paths(cx)) == path_list) + .cloned() + { + self.activate(workspace.clone(), window, cx); + return Task::ready(Ok(workspace)); + } + + if let Some(transient) = self.active_workspace.transient_workspace() { + if transient.read(cx).project_group_key(cx).path_list() == &path_list { + return Task::ready(Ok(transient.clone())); } - cx.notify(); - return; } - let old_workspace = std::mem::replace( - &mut self.workspaces[self.active_workspace_index], - workspace.clone(), - ); + let paths = path_list.paths().to_vec(); + let app_state = self.workspace().read(cx).app_state().clone(); + let requesting_window = window.window_handle().downcast::(); + + cx.spawn(async move |_this, cx| { + let result = cx + .update(|cx| { + Workspace::new_local( + paths, + app_state, + requesting_window, + None, + None, + OpenMode::Activate, + cx, + ) + }) + .await?; + Ok(result.workspace) + }) + } - let old_entity_id = old_workspace.entity_id(); - self.detach_workspace(&old_workspace, cx); + pub fn workspace(&self) -> &Entity { + match &self.active_workspace { + ActiveWorkspace::Persistent(index) => &self.workspaces[*index], + 
ActiveWorkspace::Transient(workspace) => workspace, + } + } - Self::subscribe_to_workspace(&workspace, window, cx); - self.sync_sidebar_to_workspace(&workspace, cx); + pub fn workspaces(&self) -> impl Iterator> { + self.workspaces + .iter() + .chain(self.active_workspace.transient_workspace()) + } + + /// Adds a workspace to this window as persistent without changing which + /// workspace is active. Unlike `activate()`, this always inserts into the + /// persistent list regardless of sidebar state — it's used for system- + /// initiated additions like deserialization and worktree discovery. + pub fn add(&mut self, workspace: Entity, window: &Window, cx: &mut Context) { + self.insert_workspace(workspace, window, cx); + } + + /// Ensures the workspace is in the multiworkspace and makes it the active one. + pub fn activate( + &mut self, + workspace: Entity, + window: &mut Window, + cx: &mut Context, + ) { + // Re-activating the current workspace is a no-op. + if self.workspace() == &workspace { + self.focus_active_workspace(window, cx); + return; + } + + // Resolve where we're going. + let new_index = if let Some(index) = self.workspaces.iter().position(|w| *w == workspace) { + Some(index) + } else if self.sidebar_open { + Some(self.insert_workspace(workspace.clone(), &*window, cx)) + } else { + None + }; + + // Transition the active workspace. 
+ if let Some(index) = new_index { + if let Some(old) = self.active_workspace.set_persistent(index) { + if self.sidebar_open { + self.promote_transient(old, cx); + } else { + self.detach_workspace(&old, cx); + cx.emit(MultiWorkspaceEvent::WorkspaceRemoved(old.entity_id())); + } + } + } else { + Self::subscribe_to_workspace(&workspace, window, cx); + let weak_self = cx.weak_entity(); + workspace.update(cx, |workspace, cx| { + workspace.set_multi_workspace(weak_self, cx); + }); + if let Some(old) = self.active_workspace.set_transient(workspace) { + self.detach_workspace(&old, cx); + cx.emit(MultiWorkspaceEvent::WorkspaceRemoved(old.entity_id())); + } + } - cx.emit(MultiWorkspaceEvent::WorkspaceRemoved(old_entity_id)); - cx.emit(MultiWorkspaceEvent::WorkspaceAdded(workspace)); cx.emit(MultiWorkspaceEvent::ActiveWorkspaceChanged); self.serialize(cx); + self.focus_active_workspace(window, cx); cx.notify(); } - fn set_single_workspace(&mut self, workspace: Entity, cx: &mut Context) { - self.workspaces[0] = workspace; - self.active_workspace_index = 0; - cx.emit(MultiWorkspaceEvent::ActiveWorkspaceChanged); + /// Promotes a former transient workspace into the persistent list. + /// Returns the index of the newly inserted workspace. + fn promote_transient(&mut self, workspace: Entity, cx: &mut Context) -> usize { + let project_group_key = workspace.read(cx).project().read(cx).project_group_key(cx); + self.add_project_group_key(project_group_key); + self.workspaces.push(workspace.clone()); + cx.emit(MultiWorkspaceEvent::WorkspaceAdded(workspace)); + self.workspaces.len() - 1 + } + + /// Collapses to a single transient workspace, discarding all persistent + /// workspaces. Used when multi-workspace is disabled (e.g. disable_ai). 
+ fn collapse_to_single_workspace(&mut self, window: &mut Window, cx: &mut Context) { + if self.sidebar_open { + self.close_sidebar(window, cx); + } + let active = self.workspace().clone(); + for workspace in std::mem::take(&mut self.workspaces) { + if workspace != active { + self.detach_workspace(&workspace, cx); + cx.emit(MultiWorkspaceEvent::WorkspaceRemoved(workspace.entity_id())); + } + } + self.project_group_keys.clear(); + self.active_workspace = ActiveWorkspace::Transient(active); cx.notify(); } @@ -553,12 +848,16 @@ impl MultiWorkspace { if let Some(index) = self.workspaces.iter().position(|w| *w == workspace) { index } else { + let project_group_key = workspace.read(cx).project().read(cx).project_group_key(cx); + Self::subscribe_to_workspace(&workspace, window, cx); self.sync_sidebar_to_workspace(&workspace, cx); let weak_self = cx.weak_entity(); workspace.update(cx, |workspace, cx| { workspace.set_multi_workspace(weak_self, cx); }); + + self.add_project_group_key(project_group_key); self.workspaces.push(workspace.clone()); cx.emit(MultiWorkspaceEvent::WorkspaceAdded(workspace)); cx.notify(); @@ -589,7 +888,7 @@ impl MultiWorkspace { } fn sync_sidebar_to_workspace(&self, workspace: &Entity, cx: &mut Context) { - if self.sidebar_open { + if self.sidebar_open() { let sidebar_focus_handle = self.sidebar.as_ref().map(|s| s.focus_handle(cx)); workspace.update(cx, |workspace, _| { workspace.set_sidebar_focus_handle(sidebar_focus_handle); @@ -597,36 +896,17 @@ impl MultiWorkspace { } } - fn cycle_workspace(&mut self, delta: isize, window: &mut Window, cx: &mut Context) { - let count = self.workspaces.len() as isize; - if count <= 1 { - return; - } - let current = self.active_workspace_index as isize; - let next = ((current + delta).rem_euclid(count)) as usize; - let workspace = self.workspaces[next].clone(); - self.activate(workspace, window, cx); - } - - fn next_workspace(&mut self, _: &NextWorkspace, window: &mut Window, cx: &mut Context) { - 
self.cycle_workspace(1, window, cx); - } - - fn previous_workspace( - &mut self, - _: &PreviousWorkspace, - window: &mut Window, - cx: &mut Context, - ) { - self.cycle_workspace(-1, window, cx); - } - pub(crate) fn serialize(&mut self, cx: &mut Context) { self._serialize_task = Some(cx.spawn(async move |this, cx| { let Some((window_id, state)) = this .read_with(cx, |this, cx| { - let state = crate::persistence::model::MultiWorkspaceState { + let state = MultiWorkspaceState { active_workspace_id: this.workspace().read(cx).database_id(), + project_group_keys: this + .project_group_keys() + .cloned() + .map(Into::into) + .collect::>(), sidebar_open: this.sidebar_open, sidebar_state: this.sidebar.as_ref().and_then(|s| s.serialized_state(cx)), }; @@ -841,26 +1121,82 @@ impl MultiWorkspace { let Some(index) = self.workspaces.iter().position(|w| w == workspace) else { return false; }; + + let old_key = workspace.read(cx).project_group_key(cx); + if self.workspaces.len() <= 1 { - return false; - } + let has_worktrees = workspace.read(cx).visible_worktrees(cx).next().is_some(); + + if !has_worktrees { + return false; + } - let removed_workspace = self.workspaces.remove(index); + let old_workspace = workspace.clone(); + let old_entity_id = old_workspace.entity_id(); - if self.active_workspace_index >= self.workspaces.len() { - self.active_workspace_index = self.workspaces.len() - 1; - } else if self.active_workspace_index > index { - self.active_workspace_index -= 1; + let app_state = old_workspace.read(cx).app_state().clone(); + + let project = Project::local( + app_state.client.clone(), + app_state.node_runtime.clone(), + app_state.user_store.clone(), + app_state.languages.clone(), + app_state.fs.clone(), + None, + project::LocalProjectFlags::default(), + cx, + ); + + let new_workspace = cx.new(|cx| Workspace::new(None, project, app_state, window, cx)); + + self.workspaces[0] = new_workspace.clone(); + self.active_workspace = ActiveWorkspace::Persistent(0); + + 
Self::subscribe_to_workspace(&new_workspace, window, cx); + + self.sync_sidebar_to_workspace(&new_workspace, cx); + + let weak_self = cx.weak_entity(); + + new_workspace.update(cx, |workspace, cx| { + workspace.set_multi_workspace(weak_self, cx); + }); + + self.detach_workspace(&old_workspace, cx); + + cx.emit(MultiWorkspaceEvent::WorkspaceRemoved(old_entity_id)); + cx.emit(MultiWorkspaceEvent::WorkspaceAdded(new_workspace)); + cx.emit(MultiWorkspaceEvent::ActiveWorkspaceChanged); + } else { + let removed_workspace = self.workspaces.remove(index); + + if let Some(active_index) = self.active_workspace.persistent_index() { + if active_index >= self.workspaces.len() { + self.active_workspace = ActiveWorkspace::Persistent(self.workspaces.len() - 1); + } else if active_index > index { + self.active_workspace = ActiveWorkspace::Persistent(active_index - 1); + } + } + + self.detach_workspace(&removed_workspace, cx); + + cx.emit(MultiWorkspaceEvent::WorkspaceRemoved( + removed_workspace.entity_id(), + )); + cx.emit(MultiWorkspaceEvent::ActiveWorkspaceChanged); } - self.detach_workspace(&removed_workspace, cx); + let key_still_in_use = self + .workspaces + .iter() + .any(|ws| ws.read(cx).project_group_key(cx) == old_key); + + if !key_still_in_use { + self.project_group_keys.retain(|k| k != &old_key); + } self.serialize(cx); self.focus_active_workspace(window, cx); - cx.emit(MultiWorkspaceEvent::WorkspaceRemoved( - removed_workspace.entity_id(), - )); - cx.emit(MultiWorkspaceEvent::ActiveWorkspaceChanged); cx.notify(); true @@ -877,7 +1213,7 @@ impl MultiWorkspace { return; } - let app_state: Arc = workspace.read(cx).app_state().clone(); + let app_state: Arc = workspace.read(cx).app_state().clone(); cx.defer(move |cx| { let options = (app_state.build_window_options)(None, cx); @@ -894,6 +1230,58 @@ impl MultiWorkspace { }); } + pub fn move_project_group_to_new_window( + &mut self, + key: &ProjectGroupKey, + window: &mut Window, + cx: &mut Context, + ) { + let workspaces: 
Vec<_> = self + .workspaces_for_project_group(key, cx) + .cloned() + .collect(); + if workspaces.is_empty() { + return; + } + + self.project_group_keys.retain(|k| k != key); + + let mut removed = Vec::new(); + for workspace in &workspaces { + if self.remove(workspace, window, cx) { + removed.push(workspace.clone()); + } + } + + if removed.is_empty() { + return; + } + + let app_state = removed[0].read(cx).app_state().clone(); + + cx.defer(move |cx| { + let options = (app_state.build_window_options)(None, cx); + + let first = removed[0].clone(); + let rest = removed[1..].to_vec(); + + let Ok(new_window) = cx.open_window(options, |window, cx| { + cx.new(|cx| MultiWorkspace::new(first, window, cx)) + }) else { + return; + }; + + new_window + .update(cx, |mw, window, cx| { + for workspace in rest { + mw.activate(workspace, window, cx); + } + window.activate_window(); + }) + .log_err(); + }); + } + fn move_active_workspace_to_new_window( &mut self, _: &MoveWorkspaceToNewWindow, @@ -911,17 +1299,10 @@ impl MultiWorkspace { window: &mut Window, cx: &mut Context, ) -> Task>> { - let workspace = self.workspace().clone(); - - let needs_close_prompt = - open_mode == OpenMode::Replace || !self.multi_workspace_enabled(cx); - let open_mode = if self.multi_workspace_enabled(cx) { - open_mode + if self.multi_workspace_enabled(cx) { + self.find_or_create_local_workspace(PathList::new(&paths), window, cx) } else { - OpenMode::Replace - }; - - if needs_close_prompt { + let workspace = self.workspace().clone(); cx.spawn_in(window, async move |_this, cx| { let should_continue = workspace .update_in(cx, |workspace, window, cx| { @@ -938,10 +1319,6 @@ impl MultiWorkspace { Ok(workspace) } }) - } else { - workspace.update(cx, |workspace, cx| { - workspace.open_workspace_for_paths(open_mode, paths, window, cx) - }) } } } @@ -1048,8 +1425,6 @@ impl Render for MultiWorkspace { this.focus_sidebar(window, cx); }, )) - .on_action(cx.listener(Self::next_workspace)) - 
.on_action(cx.listener(Self::previous_workspace)) .on_action(cx.listener(Self::move_active_workspace_to_new_window)) .on_action(cx.listener( |this: &mut Self, action: &ToggleThreadSwitcher, window, cx| { diff --git a/crates/workspace/src/multi_workspace_tests.rs b/crates/workspace/src/multi_workspace_tests.rs index 50161121719ec7b2835fd11e389f24860e57d8f5..ab6ca43d5aff482b637add9083b1ad9d388d7993 100644 --- a/crates/workspace/src/multi_workspace_tests.rs +++ b/crates/workspace/src/multi_workspace_tests.rs @@ -2,7 +2,8 @@ use super::*; use feature_flags::FeatureFlagAppExt; use fs::FakeFs; use gpui::TestAppContext; -use project::DisableAiSettings; +use project::{DisableAiSettings, ProjectGroupKey}; +use serde_json::json; use settings::SettingsStore; fn init_test(cx: &mut TestAppContext) { @@ -87,86 +88,256 @@ async fn test_sidebar_disabled_when_disable_ai_is_enabled(cx: &mut TestAppContex } #[gpui::test] -async fn test_replace(cx: &mut TestAppContext) { +async fn test_project_group_keys_initial(cx: &mut TestAppContext) { init_test(cx); let fs = FakeFs::new(cx.executor()); - let project_a = Project::test(fs.clone(), [], cx).await; - let project_b = Project::test(fs.clone(), [], cx).await; - let project_c = Project::test(fs.clone(), [], cx).await; - let project_d = Project::test(fs.clone(), [], cx).await; + fs.insert_tree("/root_a", json!({ "file.txt": "" })).await; + let project = Project::test(fs, ["/root_a".as_ref()], cx).await; + + let expected_key = project.read_with(cx, |project, cx| project.project_group_key(cx)); let (multi_workspace, cx) = - cx.add_window_view(|window, cx| MultiWorkspace::test_new(project_a.clone(), window, cx)); + cx.add_window_view(|window, cx| MultiWorkspace::test_new(project, window, cx)); + + multi_workspace.update(cx, |mw, cx| { + mw.open_sidebar(cx); + }); - let workspace_a_id = multi_workspace.read_with(cx, |mw, _cx| mw.workspaces()[0].entity_id()); + multi_workspace.read_with(cx, |mw, _cx| { + let keys: Vec<&ProjectGroupKey> = 
mw.project_group_keys().collect(); + assert_eq!(keys.len(), 1, "should have exactly one key on creation"); + assert_eq!(*keys[0], expected_key); + }); +} - // Replace the only workspace (single-workspace case). - let workspace_b = multi_workspace.update_in(cx, |mw, window, cx| { - let workspace = cx.new(|cx| Workspace::test_new(project_b.clone(), window, cx)); - mw.replace(workspace.clone(), &*window, cx); - workspace +#[gpui::test] +async fn test_project_group_keys_add_workspace(cx: &mut TestAppContext) { + init_test(cx); + let fs = FakeFs::new(cx.executor()); + fs.insert_tree("/root_a", json!({ "file.txt": "" })).await; + fs.insert_tree("/root_b", json!({ "file.txt": "" })).await; + let project_a = Project::test(fs.clone(), ["/root_a".as_ref()], cx).await; + let project_b = Project::test(fs.clone(), ["/root_b".as_ref()], cx).await; + + let key_a = project_a.read_with(cx, |p, cx| p.project_group_key(cx)); + let key_b = project_b.read_with(cx, |p, cx| p.project_group_key(cx)); + assert_ne!( + key_a, key_b, + "different roots should produce different keys" + ); + + let (multi_workspace, cx) = + cx.add_window_view(|window, cx| MultiWorkspace::test_new(project_a, window, cx)); + + multi_workspace.update(cx, |mw, cx| { + mw.open_sidebar(cx); + }); + + multi_workspace.read_with(cx, |mw, _cx| { + assert_eq!(mw.project_group_keys().count(), 1); + }); + + // Adding a workspace with a different project root adds a new key. 
+ multi_workspace.update_in(cx, |mw, window, cx| { + mw.test_add_workspace(project_b, window, cx); }); multi_workspace.read_with(cx, |mw, _cx| { - assert_eq!(mw.workspaces().len(), 1); + let keys: Vec<&ProjectGroupKey> = mw.project_group_keys().collect(); assert_eq!( - mw.workspaces()[0].entity_id(), - workspace_b.entity_id(), - "slot should now be project_b" - ); - assert_ne!( - mw.workspaces()[0].entity_id(), - workspace_a_id, - "project_a should be gone" + keys.len(), + 2, + "should have two keys after adding a second workspace" ); + assert_eq!(*keys[0], key_a); + assert_eq!(*keys[1], key_b); }); +} + +#[gpui::test] +async fn test_project_group_keys_duplicate_not_added(cx: &mut TestAppContext) { + init_test(cx); + let fs = FakeFs::new(cx.executor()); + fs.insert_tree("/root_a", json!({ "file.txt": "" })).await; + let project_a = Project::test(fs.clone(), ["/root_a".as_ref()], cx).await; + // A second project entity pointing at the same path produces the same key. + let project_a2 = Project::test(fs.clone(), ["/root_a".as_ref()], cx).await; + + let key_a = project_a.read_with(cx, |p, cx| p.project_group_key(cx)); + let key_a2 = project_a2.read_with(cx, |p, cx| p.project_group_key(cx)); + assert_eq!(key_a, key_a2, "same root path should produce the same key"); + + let (multi_workspace, cx) = + cx.add_window_view(|window, cx| MultiWorkspace::test_new(project_a, window, cx)); - // Add project_c as a second workspace, then replace it with project_d. 
- let workspace_c = multi_workspace.update_in(cx, |mw, window, cx| { - mw.test_add_workspace(project_c.clone(), window, cx) + multi_workspace.update(cx, |mw, cx| { + mw.open_sidebar(cx); + }); + + multi_workspace.update_in(cx, |mw, window, cx| { + mw.test_add_workspace(project_a2, window, cx); }); multi_workspace.read_with(cx, |mw, _cx| { - assert_eq!(mw.workspaces().len(), 2); - assert_eq!(mw.active_workspace_index(), 1); + let keys: Vec<&ProjectGroupKey> = mw.project_group_keys().collect(); + assert_eq!( + keys.len(), + 1, + "duplicate key should not be added when a workspace with the same root is inserted" + ); }); +} + +#[gpui::test] +async fn test_project_group_keys_on_worktree_added(cx: &mut TestAppContext) { + init_test(cx); + let fs = FakeFs::new(cx.executor()); + fs.insert_tree("/root_a", json!({ "file.txt": "" })).await; + fs.insert_tree("/root_b", json!({ "file.txt": "" })).await; + let project = Project::test(fs, ["/root_a".as_ref()], cx).await; + + let initial_key = project.read_with(cx, |p, cx| p.project_group_key(cx)); + + let (multi_workspace, cx) = + cx.add_window_view(|window, cx| MultiWorkspace::test_new(project.clone(), window, cx)); - let workspace_d = multi_workspace.update_in(cx, |mw, window, cx| { - let workspace = cx.new(|cx| Workspace::test_new(project_d.clone(), window, cx)); - mw.replace(workspace.clone(), &*window, cx); - workspace + multi_workspace.update(cx, |mw, cx| { + mw.open_sidebar(cx); }); + // Add a second worktree to the same project. 
+ let (worktree, _) = project + .update(cx, |project, cx| { + project.find_or_create_worktree("/root_b", true, cx) + }) + .await + .unwrap(); + worktree + .read_with(cx, |tree, _| tree.as_local().unwrap().scan_complete()) + .await; + cx.run_until_parked(); + + let updated_key = project.read_with(cx, |p, cx| p.project_group_key(cx)); + assert_ne!( + initial_key, updated_key, + "key should change after adding a worktree" + ); + multi_workspace.read_with(cx, |mw, _cx| { - assert_eq!(mw.workspaces().len(), 2, "should still have 2 workspaces"); - assert_eq!(mw.active_workspace_index(), 1); + let keys: Vec<&ProjectGroupKey> = mw.project_group_keys().collect(); assert_eq!( - mw.workspaces()[1].entity_id(), - workspace_d.entity_id(), - "active slot should now be project_d" - ); - assert_ne!( - mw.workspaces()[1].entity_id(), - workspace_c.entity_id(), - "project_c should be gone" + keys.len(), + 2, + "should have both the original and updated key" ); + assert_eq!(*keys[0], initial_key); + assert_eq!(*keys[1], updated_key); }); +} - // Replace with workspace_b which is already in the list — should just switch. - multi_workspace.update_in(cx, |mw, window, cx| { - mw.replace(workspace_b.clone(), &*window, cx); +#[gpui::test] +async fn test_project_group_keys_on_worktree_removed(cx: &mut TestAppContext) { + init_test(cx); + let fs = FakeFs::new(cx.executor()); + fs.insert_tree("/root_a", json!({ "file.txt": "" })).await; + fs.insert_tree("/root_b", json!({ "file.txt": "" })).await; + let project = Project::test(fs, ["/root_a".as_ref(), "/root_b".as_ref()], cx).await; + + let initial_key = project.read_with(cx, |p, cx| p.project_group_key(cx)); + + let (multi_workspace, cx) = + cx.add_window_view(|window, cx| MultiWorkspace::test_new(project.clone(), window, cx)); + + multi_workspace.update(cx, |mw, cx| { + mw.open_sidebar(cx); }); + // Remove one worktree. 
+ let worktree_b_id = project.read_with(cx, |project, cx| { + project + .worktrees(cx) + .find(|wt| wt.read(cx).root_name().as_unix_str() == "root_b") + .unwrap() + .read(cx) + .id() + }); + project.update(cx, |project, cx| { + project.remove_worktree(worktree_b_id, cx); + }); + cx.run_until_parked(); + + let updated_key = project.read_with(cx, |p, cx| p.project_group_key(cx)); + assert_ne!( + initial_key, updated_key, + "key should change after removing a worktree" + ); + multi_workspace.read_with(cx, |mw, _cx| { + let keys: Vec<&ProjectGroupKey> = mw.project_group_keys().collect(); assert_eq!( - mw.workspaces().len(), + keys.len(), 2, - "no workspace should be added or removed" + "should accumulate both the original and post-removal key" ); + assert_eq!(*keys[0], initial_key); + assert_eq!(*keys[1], updated_key); + }); +} + +#[gpui::test] +async fn test_project_group_keys_across_multiple_workspaces_and_worktree_changes( + cx: &mut TestAppContext, +) { + init_test(cx); + let fs = FakeFs::new(cx.executor()); + fs.insert_tree("/root_a", json!({ "file.txt": "" })).await; + fs.insert_tree("/root_b", json!({ "file.txt": "" })).await; + fs.insert_tree("/root_c", json!({ "file.txt": "" })).await; + let project_a = Project::test(fs.clone(), ["/root_a".as_ref()], cx).await; + let project_b = Project::test(fs.clone(), ["/root_b".as_ref()], cx).await; + + let key_a = project_a.read_with(cx, |p, cx| p.project_group_key(cx)); + let key_b = project_b.read_with(cx, |p, cx| p.project_group_key(cx)); + + let (multi_workspace, cx) = + cx.add_window_view(|window, cx| MultiWorkspace::test_new(project_a.clone(), window, cx)); + + multi_workspace.update(cx, |mw, cx| { + mw.open_sidebar(cx); + }); + + multi_workspace.update_in(cx, |mw, window, cx| { + mw.test_add_workspace(project_b, window, cx); + }); + + multi_workspace.read_with(cx, |mw, _cx| { + assert_eq!(mw.project_group_keys().count(), 2); + }); + + // Now add a worktree to project_a. This should produce a third key. 
+ let (worktree, _) = project_a + .update(cx, |project, cx| { + project.find_or_create_worktree("/root_c", true, cx) + }) + .await + .unwrap(); + worktree + .read_with(cx, |tree, _| tree.as_local().unwrap().scan_complete()) + .await; + cx.run_until_parked(); + + let key_a_updated = project_a.read_with(cx, |p, cx| p.project_group_key(cx)); + assert_ne!(key_a, key_a_updated); + + multi_workspace.read_with(cx, |mw, _cx| { + let keys: Vec<&ProjectGroupKey> = mw.project_group_keys().collect(); assert_eq!( - mw.active_workspace_index(), - 0, - "should have switched to workspace_b" + keys.len(), + 3, + "should have key_a, key_b, and the updated key_a with root_c" ); + assert_eq!(*keys[0], key_a); + assert_eq!(*keys[1], key_b); + assert_eq!(*keys[2], key_a_updated); }); } diff --git a/crates/workspace/src/notifications.rs b/crates/workspace/src/notifications.rs index b4f683fa6952b9d6f26b8933e010f4c7d2de898c..ce54765e3ff81fde015d465d18b03cea44bbbe8f 100644 --- a/crates/workspace/src/notifications.rs +++ b/crates/workspace/src/notifications.rs @@ -1226,10 +1226,8 @@ where let mut display = format!("{err:#}"); if !display.ends_with('\n') { display.push('.'); - display.push(' ') } - let detail = - f(err, window, cx).unwrap_or_else(|| format!("{display}Please try again.")); + let detail = f(err, window, cx).unwrap_or(display); window.prompt(PromptLevel::Critical, &msg, Some(&detail), &["Ok"], cx) }) { prompt.await.ok(); diff --git a/crates/workspace/src/pane.rs b/crates/workspace/src/pane.rs index 356e17e3ce51c8e13f412e90b7cf815a7b3bd260..e277df655411bf4d2c91d679ffe9beeae6be0ae6 100644 --- a/crates/workspace/src/pane.rs +++ b/crates/workspace/src/pane.rs @@ -2,6 +2,7 @@ use crate::{ CloseWindow, NewFile, NewTerminal, OpenInTerminal, OpenOptions, OpenTerminal, OpenVisible, SplitDirection, ToggleFileFinder, ToggleProjectSymbols, ToggleZoom, Workspace, WorkspaceItemBuilder, ZoomIn, ZoomOut, + focus_follows_mouse::FocusFollowsMouse as _, invalid_item_view::InvalidItemView, item::{ 
ActivateOnClose, ClosePosition, Item, ItemBufferKind, ItemHandle, ItemSettings, @@ -11,7 +12,7 @@ use crate::{ move_item, notifications::NotifyResultExt, toolbar::Toolbar, - workspace_settings::{AutosaveSetting, TabBarSettings, WorkspaceSettings}, + workspace_settings::{AutosaveSetting, FocusFollowsMouse, TabBarSettings, WorkspaceSettings}, }; use anyhow::Result; use collections::{BTreeSet, HashMap, HashSet, VecDeque}; @@ -443,6 +444,7 @@ pub struct Pane { pinned_tab_count: usize, diagnostics: HashMap, zoom_out_on_close: bool, + focus_follows_mouse: FocusFollowsMouse, diagnostic_summary_update: Task<()>, /// If a certain project item wants to get recreated with specific data, it can persist its data before the recreation here. pub project_item_restoration_data: HashMap>, @@ -615,6 +617,7 @@ impl Pane { pinned_tab_count: 0, diagnostics: Default::default(), zoom_out_on_close: true, + focus_follows_mouse: WorkspaceSettings::get_global(cx).focus_follows_mouse, diagnostic_summary_update: Task::ready(()), project_item_restoration_data: HashMap::default(), welcome_page: None, @@ -782,7 +785,6 @@ impl Pane { fn settings_changed(&mut self, window: &mut Window, cx: &mut Context) { let tab_bar_settings = TabBarSettings::get_global(cx); - let new_max_tabs = WorkspaceSettings::get_global(cx).max_tabs; if let Some(display_nav_history_buttons) = self.display_nav_history_buttons.as_mut() { *display_nav_history_buttons = tab_bar_settings.show_nav_history_buttons; @@ -795,6 +797,12 @@ impl Pane { self.nav_history.0.lock().preview_item_id = None; } + let workspace_settings = WorkspaceSettings::get_global(cx); + + self.focus_follows_mouse = workspace_settings.focus_follows_mouse; + + let new_max_tabs = workspace_settings.max_tabs; + if self.use_max_tabs && new_max_tabs != self.max_tabs { self.max_tabs = new_max_tabs; self.close_items_on_settings_change(window, cx); @@ -3662,6 +3670,11 @@ impl Pane { this.drag_split_direction = None; this.handle_external_paths_drop(paths, window, cx) 
})) + .on_click(cx.listener(move |this, event: &ClickEvent, window, cx| { + if event.click_count() == 2 { + window.dispatch_action(this.double_click_dispatch_action.boxed_clone(), cx); + } + })) } pub fn render_menu_overlay(menu: &Entity) -> Div { @@ -4460,6 +4473,7 @@ impl Render for Pane { placeholder.child(self.welcome_page.clone().unwrap()) } } + .focus_follows_mouse(self.focus_follows_mouse, cx) }) .child( // drag target @@ -4881,14 +4895,17 @@ impl Render for DraggedTab { #[cfg(test)] mod tests { - use std::{cell::Cell, iter::zip, num::NonZero}; + use std::{cell::Cell, iter::zip, num::NonZero, rc::Rc}; use super::*; use crate::{ Member, item::test::{TestItem, TestProjectItem}, }; - use gpui::{AppContext, Axis, TestAppContext, VisualTestContext, size}; + use gpui::{ + AppContext, Axis, Modifiers, MouseButton, MouseDownEvent, MouseMoveEvent, MouseUpEvent, + TestAppContext, VisualTestContext, size, + }; use project::FakeFs; use settings::SettingsStore; use theme::LoadThemes; @@ -6613,8 +6630,6 @@ mod tests { #[gpui::test] async fn test_drag_tab_to_middle_tab_with_mouse_events(cx: &mut TestAppContext) { - use gpui::{Modifiers, MouseButton, MouseDownEvent, MouseMoveEvent, MouseUpEvent}; - init_test(cx); let fs = FakeFs::new(cx.executor()); @@ -6666,8 +6681,6 @@ mod tests { async fn test_drag_pinned_tab_when_show_pinned_tabs_in_separate_row_enabled( cx: &mut TestAppContext, ) { - use gpui::{Modifiers, MouseButton, MouseDownEvent, MouseMoveEvent, MouseUpEvent}; - init_test(cx); set_pinned_tabs_separate_row(cx, true); let fs = FakeFs::new(cx.executor()); @@ -6743,8 +6756,6 @@ mod tests { async fn test_drag_unpinned_tab_when_show_pinned_tabs_in_separate_row_enabled( cx: &mut TestAppContext, ) { - use gpui::{Modifiers, MouseButton, MouseDownEvent, MouseMoveEvent, MouseUpEvent}; - init_test(cx); set_pinned_tabs_separate_row(cx, true); let fs = FakeFs::new(cx.executor()); @@ -6797,8 +6808,6 @@ mod tests { async fn 
test_drag_mixed_tabs_when_show_pinned_tabs_in_separate_row_enabled( cx: &mut TestAppContext, ) { - use gpui::{Modifiers, MouseButton, MouseDownEvent, MouseMoveEvent, MouseUpEvent}; - init_test(cx); set_pinned_tabs_separate_row(cx, true); let fs = FakeFs::new(cx.executor()); @@ -6864,8 +6873,6 @@ mod tests { #[gpui::test] async fn test_middle_click_pinned_tab_does_not_close(cx: &mut TestAppContext) { - use gpui::{Modifiers, MouseButton, MouseDownEvent, MouseUpEvent}; - init_test(cx); let fs = FakeFs::new(cx.executor()); @@ -6935,6 +6942,74 @@ mod tests { assert_item_labels(&pane, ["A*!"], cx); } + #[gpui::test] + async fn test_double_click_pinned_tab_bar_empty_space_creates_new_tab(cx: &mut TestAppContext) { + init_test(cx); + let fs = FakeFs::new(cx.executor()); + + let project = Project::test(fs, None, cx).await; + let (workspace, cx) = + cx.add_window_view(|window, cx| Workspace::test_new(project.clone(), window, cx)); + let pane = workspace.read_with(cx, |workspace, _| workspace.active_pane().clone()); + + // The real NewFile handler lives in editor::init, which isn't initialized + // in workspace tests. Register a global action handler that sets a flag so + // we can verify the action is dispatched without depending on the editor crate. + // TODO: If editor::init is ever available in workspace tests, remove this + // flag and assert the resulting tab bar state directly instead. 
+ let new_file_dispatched = Rc::new(Cell::new(false)); + cx.update(|_, cx| { + let new_file_dispatched = new_file_dispatched.clone(); + cx.on_action(move |_: &NewFile, _cx| { + new_file_dispatched.set(true); + }); + }); + + set_pinned_tabs_separate_row(cx, true); + + let item_a = add_labeled_item(&pane, "A", false, cx); + add_labeled_item(&pane, "B", false, cx); + + pane.update_in(cx, |pane, window, cx| { + let ix = pane + .index_for_item_id(item_a.item_id()) + .expect("item A should exist"); + pane.pin_tab_at(ix, window, cx); + }); + assert_item_labels(&pane, ["A!", "B*"], cx); + cx.run_until_parked(); + + let pinned_drop_target_bounds = cx + .debug_bounds("pinned_tabs_border") + .expect("pinned_tabs_border should have debug bounds"); + + cx.simulate_event(MouseDownEvent { + position: pinned_drop_target_bounds.center(), + button: MouseButton::Left, + modifiers: Modifiers::default(), + click_count: 2, + first_mouse: false, + }); + + cx.run_until_parked(); + + cx.simulate_event(MouseUpEvent { + position: pinned_drop_target_bounds.center(), + button: MouseButton::Left, + modifiers: Modifiers::default(), + click_count: 2, + }); + + cx.run_until_parked(); + + // TODO: If editor::init is ever available in workspace tests, replace this + // with an assert_item_labels check that verifies a new tab is actually created. 
+ assert!( + new_file_dispatched.get(), + "Double-clicking pinned tab bar empty space should dispatch the new file action" + ); + } + #[gpui::test] async fn test_add_item_with_new_item(cx: &mut TestAppContext) { init_test(cx); diff --git a/crates/workspace/src/pane_group.rs b/crates/workspace/src/pane_group.rs index 3fa4800afb6088e0d106c8b60a835073978e598c..c5f78eef6c4a7403589cb4e947326f9fe87ec610 100644 --- a/crates/workspace/src/pane_group.rs +++ b/crates/workspace/src/pane_group.rs @@ -1,6 +1,7 @@ use crate::{ AnyActiveCall, AppState, CollaboratorId, FollowerState, Pane, ParticipantLocation, Workspace, WorkspaceSettings, + notifications::DetachAndPromptErr, pane_group::element::pane_axis, workspace_settings::{PaneSplitDirectionHorizontal, PaneSplitDirectionVertical}, }; @@ -438,14 +439,19 @@ impl PaneLeaderDecorator for PaneRenderContext<'_> { let app_state = self.app_state.clone(); this.cursor_pointer().on_mouse_down( MouseButton::Left, - move |_, _, cx| { + move |_, window, cx| { crate::join_in_room_project( leader_project_id, leader_user_id, app_state.clone(), cx, ) - .detach_and_log_err(cx); + .detach_and_prompt_err( + "Failed to join project", + window, + cx, + |error, _, _| Some(format!("{error:#}")), + ); }, ) }, diff --git a/crates/workspace/src/persistence.rs b/crates/workspace/src/persistence.rs index d38602ea768e8edc4f3de1ec439e67f0ee432a63..2994e9d0f67d73a30838f922c9b6a0b01b21ed14 100644 --- a/crates/workspace/src/persistence.rs +++ b/crates/workspace/src/persistence.rs @@ -337,15 +337,20 @@ pub fn read_serialized_multi_workspaces( window_groups .into_iter() - .map(|group| { + .filter_map(|group| { let window_id = group.first().and_then(|sw| sw.window_id); let state = window_id .map(|wid| read_multi_workspace_state(wid, cx)) .unwrap_or_default(); - model::SerializedMultiWorkspace { - workspaces: group, + let active_workspace = state + .active_workspace_id + .and_then(|id| group.iter().position(|ws| ws.workspace_id == id)) + .or(Some(0)) + 
.and_then(|index| group.into_iter().nth(index))?; + Some(model::SerializedMultiWorkspace { + active_workspace, state, - } + }) }) .collect() } @@ -2488,11 +2493,20 @@ pub fn delete_unloaded_items( #[cfg(test)] mod tests { use super::*; - use crate::persistence::model::{ - SerializedItem, SerializedPane, SerializedPaneGroup, SerializedWorkspace, SessionWorkspace, + use crate::{ + multi_workspace::MultiWorkspace, + persistence::{ + model::{ + SerializedItem, SerializedPane, SerializedPaneGroup, SerializedWorkspace, + SessionWorkspace, + }, + read_multi_workspace_state, + }, }; - use gpui; + use feature_flags::FeatureFlagAppExt; + use gpui::AppContext as _; use pretty_assertions::assert_eq; + use project::{Project, ProjectGroupKey}; use remote::SshConnectionOptions; use serde_json::json; use std::{thread, time::Duration}; @@ -2507,12 +2521,6 @@ mod tests { #[gpui::test] async fn test_multi_workspace_serializes_on_add_and_remove(cx: &mut gpui::TestAppContext) { - use crate::multi_workspace::MultiWorkspace; - use crate::persistence::read_multi_workspace_state; - use feature_flags::FeatureFlagAppExt; - use gpui::AppContext as _; - use project::Project; - crate::tests::init_test(cx); cx.update(|cx| { @@ -2527,6 +2535,10 @@ mod tests { let (multi_workspace, cx) = cx.add_window_view(|window, cx| MultiWorkspace::test_new(project1.clone(), window, cx)); + multi_workspace.update(cx, |mw, cx| { + mw.open_sidebar(cx); + }); + multi_workspace.update_in(cx, |mw, _, cx| { mw.set_random_database_id(cx); }); @@ -2556,7 +2568,7 @@ mod tests { // --- Remove the second workspace (index 1) --- multi_workspace.update_in(cx, |mw, window, cx| { - let ws = mw.workspaces()[1].clone(); + let ws = mw.workspaces().nth(1).unwrap().clone(); mw.remove(&ws, window, cx); }); @@ -3993,6 +4005,7 @@ mod tests { window_10, MultiWorkspaceState { active_workspace_id: Some(WorkspaceId(2)), + project_group_keys: vec![], sidebar_open: true, sidebar_state: None, }, @@ -4004,6 +4017,7 @@ mod tests { window_20, 
MultiWorkspaceState { active_workspace_id: Some(WorkspaceId(3)), + project_group_keys: vec![], sidebar_open: false, sidebar_state: None, }, @@ -4040,35 +4054,30 @@ mod tests { let results = cx.update(|cx| read_serialized_multi_workspaces(session_workspaces, cx)); - // Should produce 3 groups: window 10, window 20, and the orphan. + // Should produce 3 results: window 10, window 20, and the orphan. assert_eq!(results.len(), 3); - // Window 10 group: 2 workspaces, active_workspace_id = 2, sidebar open. + // Window 10: active_workspace_id = 2 picks workspace 2 (paths /b), sidebar open. let group_10 = &results[0]; - assert_eq!(group_10.workspaces.len(), 2); + assert_eq!(group_10.active_workspace.workspace_id, WorkspaceId(2)); assert_eq!(group_10.state.active_workspace_id, Some(WorkspaceId(2))); assert_eq!(group_10.state.sidebar_open, true); - // Window 20 group: 1 workspace, active_workspace_id = 3, sidebar closed. + // Window 20: active_workspace_id = 3 picks workspace 3 (paths /c), sidebar closed. let group_20 = &results[1]; - assert_eq!(group_20.workspaces.len(), 1); + assert_eq!(group_20.active_workspace.workspace_id, WorkspaceId(3)); assert_eq!(group_20.state.active_workspace_id, Some(WorkspaceId(3))); assert_eq!(group_20.state.sidebar_open, false); - // Orphan group: no window_id, so state is default. + // Orphan: no active_workspace_id, falls back to first workspace (id 4). 
let group_none = &results[2]; - assert_eq!(group_none.workspaces.len(), 1); + assert_eq!(group_none.active_workspace.workspace_id, WorkspaceId(4)); assert_eq!(group_none.state.active_workspace_id, None); assert_eq!(group_none.state.sidebar_open, false); } #[gpui::test] async fn test_flush_serialization_completes_before_quit(cx: &mut gpui::TestAppContext) { - use crate::multi_workspace::MultiWorkspace; - use feature_flags::FeatureFlagAppExt; - - use project::Project; - crate::tests::init_test(cx); cx.update(|cx| { @@ -4114,12 +4123,6 @@ mod tests { #[gpui::test] async fn test_create_workspace_serialization(cx: &mut gpui::TestAppContext) { - use crate::multi_workspace::MultiWorkspace; - use crate::persistence::read_multi_workspace_state; - use feature_flags::FeatureFlagAppExt; - - use project::Project; - crate::tests::init_test(cx); cx.update(|cx| { @@ -4177,11 +4180,6 @@ mod tests { #[gpui::test] async fn test_remove_workspace_clears_session_binding(cx: &mut gpui::TestAppContext) { - use crate::multi_workspace::MultiWorkspace; - use feature_flags::FeatureFlagAppExt; - use gpui::AppContext as _; - use project::Project; - crate::tests::init_test(cx); cx.update(|cx| { @@ -4197,6 +4195,10 @@ mod tests { let (multi_workspace, cx) = cx.add_window_view(|window, cx| MultiWorkspace::test_new(project1.clone(), window, cx)); + multi_workspace.update(cx, |mw, cx| { + mw.open_sidebar(cx); + }); + multi_workspace.update_in(cx, |mw, _, cx| { mw.set_random_database_id(cx); }); @@ -4239,7 +4241,7 @@ mod tests { // Remove workspace at index 1 (the second workspace). 
multi_workspace.update_in(cx, |mw, window, cx| { - let ws = mw.workspaces()[1].clone(); + let ws = mw.workspaces().nth(1).unwrap().clone(); mw.remove(&ws, window, cx); }); @@ -4269,11 +4271,6 @@ mod tests { #[gpui::test] async fn test_remove_workspace_not_restored_as_zombie(cx: &mut gpui::TestAppContext) { - use crate::multi_workspace::MultiWorkspace; - use feature_flags::FeatureFlagAppExt; - use gpui::AppContext as _; - use project::Project; - crate::tests::init_test(cx); cx.update(|cx| { @@ -4299,6 +4296,10 @@ mod tests { let (multi_workspace, cx) = cx.add_window_view(|window, cx| MultiWorkspace::test_new(project1.clone(), window, cx)); + multi_workspace.update(cx, |mw, cx| { + mw.open_sidebar(cx); + }); + multi_workspace.update_in(cx, |mw, _, cx| { mw.workspace().update(cx, |ws, _cx| { ws.set_database_id(ws1_id); @@ -4350,7 +4351,7 @@ mod tests { // Remove workspace2 (index 1). multi_workspace.update_in(cx, |mw, window, cx| { - let ws = mw.workspaces()[1].clone(); + let ws = mw.workspaces().nth(1).unwrap().clone(); mw.remove(&ws, window, cx); }); @@ -4376,11 +4377,6 @@ mod tests { #[gpui::test] async fn test_pending_removal_tasks_drained_on_flush(cx: &mut gpui::TestAppContext) { - use crate::multi_workspace::MultiWorkspace; - use feature_flags::FeatureFlagAppExt; - use gpui::AppContext as _; - use project::Project; - crate::tests::init_test(cx); cx.update(|cx| { @@ -4401,6 +4397,10 @@ mod tests { let (multi_workspace, cx) = cx.add_window_view(|window, cx| MultiWorkspace::test_new(project1.clone(), window, cx)); + multi_workspace.update(cx, |mw, cx| { + mw.open_sidebar(cx); + }); + multi_workspace.update_in(cx, |mw, _, cx| { mw.set_random_database_id(cx); }); @@ -4434,7 +4434,7 @@ mod tests { // Remove workspace2 — this pushes a task to pending_removal_tasks. 
multi_workspace.update_in(cx, |mw, window, cx| { - let ws = mw.workspaces()[1].clone(); + let ws = mw.workspaces().nth(1).unwrap().clone(); mw.remove(&ws, window, cx); }); @@ -4443,7 +4443,6 @@ mod tests { let all_tasks = multi_workspace.update_in(cx, |mw, window, cx| { let mut tasks: Vec> = mw .workspaces() - .iter() .map(|workspace| { workspace.update(cx, |workspace, cx| { workspace.flush_serialization(window, cx) @@ -4482,10 +4481,6 @@ mod tests { #[gpui::test] async fn test_create_workspace_bounds_observer_uses_fresh_id(cx: &mut gpui::TestAppContext) { - use crate::multi_workspace::MultiWorkspace; - use feature_flags::FeatureFlagAppExt; - use project::Project; - crate::tests::init_test(cx); cx.update(|cx| { @@ -4543,10 +4538,6 @@ mod tests { #[gpui::test] async fn test_flush_serialization_writes_bounds(cx: &mut gpui::TestAppContext) { - use crate::multi_workspace::MultiWorkspace; - use feature_flags::FeatureFlagAppExt; - use project::Project; - crate::tests::init_test(cx); cx.update(|cx| { @@ -4702,4 +4693,223 @@ mod tests { assert_eq!(result[2].2.paths(), &[PathBuf::from("/plain-project")]); assert_eq!(result[2].0, WorkspaceId(4)); } + + #[gpui::test] + async fn test_restore_window_with_linked_worktree_and_multiple_project_groups( + cx: &mut gpui::TestAppContext, + ) { + crate::tests::init_test(cx); + + cx.update(|cx| { + cx.set_staff(true); + cx.update_flags(true, vec!["agent-v2".to_string()]); + }); + + let fs = fs::FakeFs::new(cx.executor()); + + // Main git repo at /repo + fs.insert_tree( + "/repo", + json!({ + ".git": { + "HEAD": "ref: refs/heads/main", + "worktrees": { + "feature": { + "commondir": "../../", + "HEAD": "ref: refs/heads/feature" + } + } + }, + "src": { "main.rs": "" } + }), + ) + .await; + + // Linked worktree checkout pointing back to /repo + fs.insert_tree( + "/worktree-feature", + json!({ + ".git": "gitdir: /repo/.git/worktrees/feature", + "src": { "lib.rs": "" } + }), + ) + .await; + + // --- Phase 1: Set up the original 
multi-workspace window --- + + let project_1 = Project::test(fs.clone(), ["/repo".as_ref()], cx).await; + let project_1_linked_worktree = + Project::test(fs.clone(), ["/worktree-feature".as_ref()], cx).await; + + // Wait for git discovery to finish. + cx.run_until_parked(); + + // Create a second, unrelated project so we have two distinct project groups. + fs.insert_tree( + "/other-project", + json!({ + ".git": { "HEAD": "ref: refs/heads/main" }, + "readme.md": "" + }), + ) + .await; + let project_2 = Project::test(fs.clone(), ["/other-project".as_ref()], cx).await; + cx.run_until_parked(); + + // Create the MultiWorkspace with project_2, then add the main repo + // and its linked worktree. The linked worktree is added last and + // becomes the active workspace. + let (multi_workspace, cx) = cx + .add_window_view(|window, cx| MultiWorkspace::test_new(project_2.clone(), window, cx)); + + multi_workspace.update(cx, |mw, cx| { + mw.open_sidebar(cx); + }); + + multi_workspace.update_in(cx, |mw, window, cx| { + mw.test_add_workspace(project_1.clone(), window, cx); + }); + + let workspace_worktree = multi_workspace.update_in(cx, |mw, window, cx| { + mw.test_add_workspace(project_1_linked_worktree.clone(), window, cx) + }); + + // Assign database IDs and set up session bindings so serialization + // writes real rows. + multi_workspace.update_in(cx, |mw, _, cx| { + for workspace in mw.workspaces() { + workspace.update(cx, |ws, _cx| { + ws.set_random_database_id(); + }); + } + }); + + // Flush serialization for each individual workspace (writes to SQLite) + // and for the MultiWorkspace (writes to KVP). 
+ let tasks = multi_workspace.update_in(cx, |mw, window, cx| { + let session_id = mw.workspace().read(cx).session_id(); + let window_id_u64 = window.window_handle().window_id().as_u64(); + + let mut tasks: Vec> = Vec::new(); + for workspace in mw.workspaces() { + tasks.push(workspace.update(cx, |ws, cx| ws.flush_serialization(window, cx))); + if let Some(db_id) = workspace.read(cx).database_id() { + let db = WorkspaceDb::global(cx); + let session_id = session_id.clone(); + tasks.push(cx.background_spawn(async move { + db.set_session_binding(db_id, session_id, Some(window_id_u64)) + .await + .log_err(); + })); + } + } + mw.serialize(cx); + tasks + }); + cx.run_until_parked(); + for task in tasks { + task.await; + } + cx.run_until_parked(); + + let active_db_id = workspace_worktree.read_with(cx, |ws, _| ws.database_id()); + assert!( + active_db_id.is_some(), + "Active workspace should have a database ID" + ); + + // --- Phase 2: Read back and verify the serialized state --- + + let session_id = multi_workspace + .read_with(cx, |mw, cx| mw.workspace().read(cx).session_id()) + .unwrap(); + let db = cx.update(|_, cx| WorkspaceDb::global(cx)); + let session_workspaces = db + .last_session_workspace_locations(&session_id, None, fs.as_ref()) + .await + .expect("should load session workspaces"); + assert!( + !session_workspaces.is_empty(), + "Should have at least one session workspace" + ); + + let multi_workspaces = + cx.update(|_, cx| read_serialized_multi_workspaces(session_workspaces, cx)); + assert_eq!( + multi_workspaces.len(), + 1, + "All workspaces share one window, so there should be exactly one multi-workspace" + ); + + let serialized = &multi_workspaces[0]; + assert_eq!( + serialized.active_workspace.workspace_id, + active_db_id.unwrap(), + ); + assert_eq!(serialized.state.project_group_keys.len(), 2,); + + // Verify the serialized project group keys round-trip back to the + // originals. 
+ let restored_keys: Vec = serialized + .state + .project_group_keys + .iter() + .cloned() + .map(Into::into) + .collect(); + let expected_keys = vec![ + ProjectGroupKey::new(None, PathList::new(&["/other-project"])), + ProjectGroupKey::new(None, PathList::new(&["/repo"])), + ]; + assert_eq!( + restored_keys, expected_keys, + "Deserialized project group keys should match the originals" + ); + + // --- Phase 3: Restore the window and verify the result --- + + let app_state = + multi_workspace.read_with(cx, |mw, cx| mw.workspace().read(cx).app_state().clone()); + + let serialized_mw = multi_workspaces.into_iter().next().unwrap(); + let restored_handle: gpui::WindowHandle = cx + .update(|_, cx| { + cx.spawn(async move |mut cx| { + crate::restore_multiworkspace(serialized_mw, app_state, &mut cx).await + }) + }) + .await + .expect("restore_multiworkspace should succeed"); + + cx.run_until_parked(); + + // The restored window should have the same project group keys. + let restored_keys: Vec = restored_handle + .read_with(cx, |mw: &MultiWorkspace, _cx| { + mw.project_group_keys().cloned().collect() + }) + .unwrap(); + assert_eq!( + restored_keys, expected_keys, + "Restored window should have the same project group keys as the original" + ); + + // The active workspace in the restored window should have the linked + // worktree paths. 
+ let active_paths: Vec = restored_handle + .read_with(cx, |mw: &MultiWorkspace, cx| { + mw.workspace() + .read(cx) + .root_paths(cx) + .into_iter() + .map(|p: Arc| p.to_path_buf()) + .collect() + }) + .unwrap(); + assert_eq!( + active_paths, + vec![PathBuf::from("/worktree-feature")], + "The restored active workspace should be the linked worktree project" + ); + } } diff --git a/crates/workspace/src/persistence/model.rs b/crates/workspace/src/persistence/model.rs index 6b55d09ebbc2375f8cce3f2b81bc4f1aa9620e76..b50d82fff0b05c3511967dd65a9060e38ca4ca26 100644 --- a/crates/workspace/src/persistence/model.rs +++ b/crates/workspace/src/persistence/model.rs @@ -13,7 +13,7 @@ use db::sqlez::{ use gpui::{AsyncWindowContext, Entity, WeakEntity, WindowId}; use language::{Toolchain, ToolchainScope}; -use project::{Project, debugger::breakpoint_store::SourceBreakpoint}; +use project::{Project, ProjectGroupKey, debugger::breakpoint_store::SourceBreakpoint}; use remote::RemoteConnectionOptions; use serde::{Deserialize, Serialize}; use std::{ @@ -21,7 +21,7 @@ use std::{ path::{Path, PathBuf}, sync::Arc, }; -use util::ResultExt; +use util::{ResultExt, path_list::SerializedPathList}; use uuid::Uuid; #[derive( @@ -36,7 +36,7 @@ pub(crate) enum RemoteConnectionKind { Docker, } -#[derive(Debug, PartialEq, Clone)] +#[derive(Debug, PartialEq, Clone, serde::Serialize, serde::Deserialize)] pub enum SerializedWorkspaceLocation { Local, Remote(RemoteConnectionOptions), @@ -59,21 +59,51 @@ pub struct SessionWorkspace { pub window_id: Option, } +#[derive(Debug, Clone, serde::Serialize, serde::Deserialize)] +pub struct SerializedProjectGroupKey { + pub path_list: SerializedPathList, + pub(crate) location: SerializedWorkspaceLocation, +} + +impl From for SerializedProjectGroupKey { + fn from(value: ProjectGroupKey) -> Self { + SerializedProjectGroupKey { + path_list: value.path_list().serialize(), + location: match value.host() { + Some(host) => SerializedWorkspaceLocation::Remote(host), + 
None => SerializedWorkspaceLocation::Local, + }, + } + } +} + +impl From for ProjectGroupKey { + fn from(value: SerializedProjectGroupKey) -> Self { + let path_list = PathList::deserialize(&value.path_list); + let host = match value.location { + SerializedWorkspaceLocation::Local => None, + SerializedWorkspaceLocation::Remote(opts) => Some(opts), + }; + ProjectGroupKey::new(host, path_list) + } +} + /// Per-window state for a MultiWorkspace, persisted to KVP. #[derive(Debug, Clone, Default, serde::Serialize, serde::Deserialize)] pub struct MultiWorkspaceState { pub active_workspace_id: Option, pub sidebar_open: bool, + pub project_group_keys: Vec, #[serde(default)] pub sidebar_state: Option, } /// The serialized state of a single MultiWorkspace window from a previous session: -/// all workspaces that shared the window, which one was active, and whether the -/// sidebar was open. +/// the active workspace to restore plus window-level state (project group keys, +/// sidebar). #[derive(Debug, Clone)] pub struct SerializedMultiWorkspace { - pub workspaces: Vec, + pub active_workspace: SessionWorkspace, pub state: MultiWorkspaceState, } diff --git a/crates/workspace/src/searchable.rs b/crates/workspace/src/searchable.rs index 93d809d7a522d11e4b4bd78e71899b89aa4d0508..f0932a7d7b3e7880c27b40c28890f063f4de731e 100644 --- a/crates/workspace/src/searchable.rs +++ b/crates/workspace/src/searchable.rs @@ -55,6 +55,7 @@ pub struct SearchOptions { /// Specifies whether the supports search & replace. 
pub replacement: bool, pub selection: bool, + pub select_all: bool, pub find_in_results: bool, } @@ -78,6 +79,7 @@ pub trait SearchableItem: Item + EventEmitter { regex: true, replacement: true, selection: true, + select_all: true, find_in_results: false, } } diff --git a/crates/workspace/src/security_modal.rs b/crates/workspace/src/security_modal.rs index 664aa891550cecdd602d54bfca579d04e03f33dc..2130a1d1eca3d33651a057d32a252718270f89f8 100644 --- a/crates/workspace/src/security_modal.rs +++ b/crates/workspace/src/security_modal.rs @@ -7,7 +7,7 @@ use std::{ }; use collections::{HashMap, HashSet}; -use gpui::{DismissEvent, EventEmitter, FocusHandle, Focusable, WeakEntity}; +use gpui::{DismissEvent, EventEmitter, FocusHandle, Focusable, ScrollHandle, WeakEntity}; use project::{ WorktreeId, @@ -17,7 +17,8 @@ use project::{ use smallvec::SmallVec; use theme::ActiveTheme; use ui::{ - AlertModal, Checkbox, FluentBuilder, KeyBinding, ListBulletItem, ToggleState, prelude::*, + AlertModal, Checkbox, FluentBuilder, KeyBinding, ListBulletItem, ToggleState, WithScrollbar, + prelude::*, }; use crate::{DismissDecision, ModalView, ToggleWorktreeSecurity}; @@ -29,6 +30,7 @@ pub struct SecurityModal { worktree_store: WeakEntity, remote_host: Option, focus_handle: FocusHandle, + project_list_scroll_handle: ScrollHandle, trusted: Option, } @@ -63,16 +65,17 @@ impl ModalView for SecurityModal { } impl Render for SecurityModal { - fn render(&mut self, _window: &mut Window, cx: &mut Context) -> impl IntoElement { + fn render(&mut self, window: &mut Window, cx: &mut Context) -> impl IntoElement { if self.restricted_paths.is_empty() { self.dismiss(cx); return v_flex().into_any_element(); } - let header_label = if self.restricted_paths.len() == 1 { - "Unrecognized Project" + let restricted_count = self.restricted_paths.len(); + let header_label: SharedString = if restricted_count == 1 { + "Unrecognized Project".into() } else { - "Unrecognized Projects" + format!("Unrecognized Projects 
({})", restricted_count).into() }; let trust_label = self.build_trust_label(); @@ -102,32 +105,61 @@ impl Render for SecurityModal { .child(Icon::new(IconName::Warning).color(Color::Warning)) .child(Label::new(header_label)), ) - .children(self.restricted_paths.values().filter_map(|restricted_path| { - let abs_path = if restricted_path.is_file { - restricted_path.abs_path.parent() - } else { - Some(restricted_path.abs_path.as_ref()) - }?; - let label = match &restricted_path.host { - Some(remote_host) => match &remote_host.user_name { - Some(user_name) => format!( - "{} ({}@{})", - self.shorten_path(abs_path).display(), - user_name, - remote_host.host_identifier - ), - None => format!( - "{} ({})", - self.shorten_path(abs_path).display(), - remote_host.host_identifier - ), - }, - None => self.shorten_path(abs_path).display().to_string(), - }; - Some(h_flex() - .pl(IconSize::default().rems() + rems(0.5)) - .child(Label::new(label).color(Color::Muted))) - })), + .child( + div() + .size_full() + .vertical_scrollbar_for(&self.project_list_scroll_handle, window, cx) + .child( + v_flex() + .id("paths_container") + .max_h_24() + .overflow_y_scroll() + .track_scroll(&self.project_list_scroll_handle) + .children( + self.restricted_paths.values().filter_map( + |restricted_path| { + let abs_path = if restricted_path.is_file { + restricted_path.abs_path.parent() + } else { + Some(restricted_path.abs_path.as_ref()) + }?; + let label = match &restricted_path.host { + Some(remote_host) => { + match &remote_host.user_name { + Some(user_name) => format!( + "{} ({}@{})", + self.shorten_path(abs_path) + .display(), + user_name, + remote_host.host_identifier + ), + None => format!( + "{} ({})", + self.shorten_path(abs_path) + .display(), + remote_host.host_identifier + ), + } + } + None => self + .shorten_path(abs_path) + .display() + .to_string(), + }; + Some( + h_flex() + .pl( + IconSize::default().rems() + rems(0.5), + ) + .child( + Label::new(label).color(Color::Muted), + ), + ) + 
}, + ), + ), + ), + ), ) .child( v_flex() @@ -219,6 +251,7 @@ impl SecurityModal { remote_host: remote_host.map(|host| host.into()), restricted_paths: HashMap::default(), focus_handle: cx.focus_handle(), + project_list_scroll_handle: ScrollHandle::new(), trust_parents: false, home_dir: std::env::home_dir(), trusted: None, diff --git a/crates/workspace/src/tasks.rs b/crates/workspace/src/tasks.rs index 0ebb97b9d75543986bb6727546aad872a11a4f87..98421365532a8fdd4fc36f0f5c68e83b0814ae8e 100644 --- a/crates/workspace/src/tasks.rs +++ b/crates/workspace/src/tasks.rs @@ -1,13 +1,14 @@ use std::process::ExitStatus; use anyhow::Result; +use collections::HashSet; use gpui::{AppContext, Context, Entity, Task}; use language::Buffer; use project::{TaskSourceKind, WorktreeId}; use remote::ConnectionState; use task::{ DebugScenario, ResolvedTask, SaveStrategy, SharedTaskContext, SpawnInTerminal, TaskContext, - TaskTemplate, + TaskHook, TaskTemplate, TaskVariables, VariableName, }; use ui::Window; use util::TryFutureExt; @@ -164,6 +165,111 @@ impl Workspace { Task::ready(None) } } + + pub fn run_create_worktree_tasks(&mut self, window: &mut Window, cx: &mut Context) { + let project = self.project().clone(); + let hooks = HashSet::from_iter([TaskHook::CreateWorktree]); + + let worktree_tasks: Vec<(WorktreeId, TaskContext, Vec)> = { + let project = project.read(cx); + let task_store = project.task_store(); + let Some(inventory) = task_store.read(cx).task_inventory().cloned() else { + return; + }; + + let git_store = project.git_store().read(cx); + + let mut worktree_tasks = Vec::new(); + for worktree in project.worktrees(cx) { + let worktree = worktree.read(cx); + let worktree_id = worktree.id(); + let worktree_abs_path = worktree.abs_path(); + + let templates: Vec = inventory + .read(cx) + .templates_with_hooks(&hooks, worktree_id) + .into_iter() + .map(|(_, template)| template) + .collect(); + + if templates.is_empty() { + continue; + } + + let mut task_variables = 
TaskVariables::default(); + task_variables.insert( + VariableName::WorktreeRoot, + worktree_abs_path.to_string_lossy().into_owned(), + ); + + if let Some(path) = git_store.original_repo_path_for_worktree(worktree_id, cx) { + task_variables.insert( + VariableName::MainGitWorktree, + path.to_string_lossy().into_owned(), + ); + } + + let task_context = TaskContext { + cwd: Some(worktree_abs_path.to_path_buf()), + task_variables, + project_env: Default::default(), + }; + + worktree_tasks.push((worktree_id, task_context, templates)); + } + worktree_tasks + }; + + if worktree_tasks.is_empty() { + return; + } + + let task = cx.spawn_in(window, async move |workspace, cx| { + let mut tasks = Vec::new(); + for (worktree_id, task_context, templates) in worktree_tasks { + let id_base = format!("worktree_setup_{worktree_id}"); + + tasks.push(cx.spawn({ + let workspace = workspace.clone(); + async move |cx| { + for task_template in templates { + let Some(resolved) = + task_template.resolve_task(&id_base, &task_context) + else { + continue; + }; + + let status = workspace.update_in(cx, |workspace, window, cx| { + workspace.spawn_in_terminal(resolved.resolved, window, cx) + })?; + + if let Some(result) = status.await { + match result { + Ok(exit_status) if !exit_status.success() => { + log::error!( + "Git worktree setup task failed with status: {:?}", + exit_status.code() + ); + break; + } + Err(error) => { + log::error!("Git worktree setup task error: {error:#}"); + break; + } + _ => {} + } + } + } + anyhow::Ok(()) + } + })); + } + + futures::future::join_all(tasks).await; + anyhow::Ok(()) + }); + task.detach_and_log_err(cx); + } } #[cfg(test)] diff --git a/crates/workspace/src/welcome.rs b/crates/workspace/src/welcome.rs index efd9b75a6802f888f43654e21006f202cc36c5a4..dceca3e85f4308952563e689c608c92e9f77144f 100644 --- a/crates/workspace/src/welcome.rs +++ b/crates/workspace/src/welcome.rs @@ -326,7 +326,7 @@ impl WelcomePage { self.workspace .update(cx, |workspace, cx| { 
workspace - .open_workspace_for_paths(OpenMode::Replace, paths, window, cx) + .open_workspace_for_paths(OpenMode::Activate, paths, window, cx) .detach_and_log_err(cx); }) .log_err(); diff --git a/crates/workspace/src/workspace.rs b/crates/workspace/src/workspace.rs index ae05c2c59012b2caf217ac54a80b377aee87f09d..7979ffe828cbf8c4da5a40a29eaa6537f1433c3c 100644 --- a/crates/workspace/src/workspace.rs +++ b/crates/workspace/src/workspace.rs @@ -19,6 +19,7 @@ mod security_modal; pub mod shared_screen; use db::smol::future::yield_now; pub use shared_screen::SharedScreen; +pub mod focus_follows_mouse; mod status_bar; pub mod tasks; mod theme_preview; @@ -31,8 +32,8 @@ pub use crate::notifications::NotificationFrame; pub use dock::Panel; pub use multi_workspace::{ CloseWorkspaceSidebar, DraggedSidebar, FocusWorkspaceSidebar, MultiWorkspace, - MultiWorkspaceEvent, NextWorkspace, PreviousWorkspace, Sidebar, SidebarEvent, SidebarHandle, - SidebarRenderState, SidebarSide, ToggleWorkspaceSidebar, sidebar_side_context_menu, + MultiWorkspaceEvent, Sidebar, SidebarEvent, SidebarHandle, SidebarRenderState, SidebarSide, + ToggleWorkspaceSidebar, sidebar_side_context_menu, }; pub use path_list::{PathList, SerializedPathList}; pub use toast_layer::{ToastAction, ToastLayer, ToastView}; @@ -83,15 +84,15 @@ use persistence::{SerializedWindowBounds, model::SerializedWorkspace}; pub use persistence::{ WorkspaceDb, delete_unloaded_items, model::{ - DockStructure, ItemId, SerializedMultiWorkspace, SerializedWorkspaceLocation, - SessionWorkspace, + DockStructure, ItemId, MultiWorkspaceState, SerializedMultiWorkspace, + SerializedWorkspaceLocation, SessionWorkspace, }, read_serialized_multi_workspaces, resolve_worktree_workspaces, }; use postage::stream::Stream; use project::{ - DirectoryLister, Project, ProjectEntryId, ProjectPath, ResolvedPath, Worktree, WorktreeId, - WorktreeSettings, + DirectoryLister, Project, ProjectEntryId, ProjectGroupKey, ProjectPath, ResolvedPath, Worktree, + 
WorktreeId, WorktreeSettings, debugger::{breakpoint_store::BreakpointStoreEvent, session::ThreadStatus}, project_settings::ProjectSettings, toolchain_store::ToolchainStoreEvent, @@ -147,8 +148,8 @@ use util::{ }; use uuid::Uuid; pub use workspace_settings::{ - AutosaveSetting, BottomDockLayout, RestoreOnStartupBehavior, StatusBarSettings, TabBarSettings, - WorkspaceSettings, + AutosaveSetting, BottomDockLayout, FocusFollowsMouse, RestoreOnStartupBehavior, + StatusBarSettings, TabBarSettings, WorkspaceSettings, }; use zed_actions::{Spawn, feedback::FileBugReport, theme::ToggleMode}; @@ -655,13 +656,25 @@ impl From for i64 { } } -fn prompt_and_open_paths(app_state: Arc, options: PathPromptOptions, cx: &mut App) { +fn prompt_and_open_paths( + app_state: Arc, + options: PathPromptOptions, + create_new_window: bool, + cx: &mut App, +) { if let Some(workspace_window) = local_workspace_windows(cx).into_iter().next() { workspace_window .update(cx, |multi_workspace, window, cx| { let workspace = multi_workspace.workspace().clone(); workspace.update(cx, |workspace, cx| { - prompt_for_open_path_and_open(workspace, app_state, options, true, window, cx); + prompt_for_open_path_and_open( + workspace, + app_state, + options, + create_new_window, + window, + cx, + ); }); }) .ok(); @@ -672,7 +685,7 @@ fn prompt_and_open_paths(app_state: Arc, options: PathPromptOptions, c None, None, None, - OpenMode::Replace, + OpenMode::Activate, cx, ); cx.spawn(async move |cx| { @@ -681,7 +694,14 @@ fn prompt_and_open_paths(app_state: Arc, options: PathPromptOptions, c window.activate_window(); let workspace = multi_workspace.workspace().clone(); workspace.update(cx, |workspace, cx| { - prompt_for_open_path_and_open(workspace, app_state, options, true, window, cx); + prompt_for_open_path_and_open( + workspace, + app_state, + options, + create_new_window, + window, + cx, + ); }); })?; anyhow::Ok(()) @@ -713,7 +733,7 @@ pub fn prompt_for_open_path_and_open( if let Some(handle) = 
multi_workspace_handle { if let Some(task) = handle .update(cx, |multi_workspace, window, cx| { - multi_workspace.open_project(paths, OpenMode::Replace, window, cx) + multi_workspace.open_project(paths, OpenMode::Activate, window, cx) }) .log_err() { @@ -742,7 +762,7 @@ pub fn init(app_state: Arc, cx: &mut App) { cx.on_action(|_: &CloseWindow, cx| Workspace::close_global(cx)) .on_action(|_: &Reload, cx| reload(cx)) - .on_action(|_: &Open, cx: &mut App| { + .on_action(|action: &Open, cx: &mut App| { let app_state = AppState::global(cx); prompt_and_open_paths( app_state, @@ -752,6 +772,7 @@ pub fn init(app_state: Arc, cx: &mut App) { multiple: true, prompt: None, }, + action.create_new_window, cx, ); }) @@ -766,6 +787,7 @@ pub fn init(app_state: Arc, cx: &mut App) { multiple: true, prompt: None, }, + true, cx, ); }); @@ -1344,6 +1366,8 @@ pub struct Workspace { scheduled_tasks: Vec>, last_open_dock_positions: Vec, removing: bool, + open_in_dev_container: bool, + _dev_container_task: Option>>, _panels_task: Option>>, sidebar_focus_handle: Option, multi_workspace: Option>, @@ -1378,8 +1402,6 @@ pub enum OpenMode { /// Add to the window's multi workspace and activate it. 
#[default] Activate, - /// Replace the currently active workspace, and any of it's linked workspaces - Replace, } impl Workspace { @@ -1778,6 +1800,8 @@ impl Workspace { removing: false, sidebar_focus_handle: None, multi_workspace, + open_in_dev_container: false, + _dev_container_task: None, } } @@ -1917,9 +1941,6 @@ impl Workspace { workspace }); match open_mode { - OpenMode::Replace => { - multi_workspace.replace(workspace.clone(), &*window, cx); - } OpenMode::Activate => { multi_workspace.activate(workspace.clone(), window, cx); } @@ -2052,6 +2073,10 @@ impl Workspace { }) } + pub fn project_group_key(&self, cx: &App) -> ProjectGroupKey { + self.project.read(cx).project_group_key(cx) + } + pub fn weak_handle(&self) -> WeakEntity { self.weak_self.clone() } @@ -2800,6 +2825,18 @@ impl Workspace { self.debugger_provider = Some(Arc::new(provider)); } + pub fn set_open_in_dev_container(&mut self, value: bool) { + self.open_in_dev_container = value; + } + + pub fn open_in_dev_container(&self) -> bool { + self.open_in_dev_container + } + + pub fn set_dev_container_task(&mut self, task: Task>) { + self._dev_container_task = Some(task); + } + pub fn debugger_provider(&self) -> Option> { self.debugger_provider.clone() } @@ -3026,7 +3063,6 @@ impl Workspace { self.project.read(cx).visible_worktrees(cx) } - #[cfg(any(test, feature = "test-support"))] pub fn worktree_scans_complete(&self, cx: &App) -> impl Future + 'static + use<> { let futures = self .worktrees(cx) @@ -3394,7 +3430,7 @@ impl Workspace { let workspace_is_empty = !is_remote && !has_worktree && !has_dirty_items; if workspace_is_empty { - open_mode = OpenMode::Replace; + open_mode = OpenMode::Activate; } let app_state = self.app_state.clone(); @@ -5528,7 +5564,9 @@ impl Workspace { if let Some(project_id) = other_project_id { let app_state = self.app_state.clone(); crate::join_in_room_project(project_id, remote_participant.user.id, app_state, cx) - .detach_and_log_err(cx); + .detach_and_prompt_err("Failed to 
join project", window, cx, |error, _, _| { + Some(format!("{error:#}")) + }); } } @@ -7676,11 +7714,6 @@ impl GlobalAnyActiveCall { } } -pub fn merge_conflict_notification_id() -> NotificationId { - struct MergeConflictNotification; - NotificationId::unique::() -} - /// Workspace-local view of a remote participant's location. #[derive(Clone, Copy, Debug, PartialEq, Eq)] pub enum ParticipantLocation { @@ -8604,30 +8637,32 @@ pub async fn last_session_workspace_locations( .log_err() } -pub struct MultiWorkspaceRestoreResult { - pub window_handle: WindowHandle, - pub errors: Vec, -} - pub async fn restore_multiworkspace( multi_workspace: SerializedMultiWorkspace, app_state: Arc, cx: &mut AsyncApp, -) -> anyhow::Result { - let SerializedMultiWorkspace { workspaces, state } = multi_workspace; - let mut group_iter = workspaces.into_iter(); - let first = group_iter - .next() - .context("window group must not be empty")?; - - let window_handle = if first.paths.is_empty() { - cx.update(|cx| open_workspace_by_id(first.workspace_id, app_state.clone(), None, cx)) - .await? +) -> anyhow::Result> { + let SerializedMultiWorkspace { + active_workspace, + state, + } = multi_workspace; + let MultiWorkspaceState { + sidebar_open, + project_group_keys, + sidebar_state, + .. + } = state; + + let window_handle = if active_workspace.paths.is_empty() { + cx.update(|cx| { + open_workspace_by_id(active_workspace.workspace_id, app_state.clone(), None, cx) + }) + .await? } else { let OpenResult { window, .. 
} = cx .update(|cx| { Workspace::new_local( - first.paths.paths().to_vec(), + active_workspace.paths.paths().to_vec(), app_state.clone(), None, None, @@ -8640,65 +8675,17 @@ pub async fn restore_multiworkspace( window }; - let mut errors = Vec::new(); - - for session_workspace in group_iter { - let error = if session_workspace.paths.is_empty() { - cx.update(|cx| { - open_workspace_by_id( - session_workspace.workspace_id, - app_state.clone(), - Some(window_handle), - cx, - ) - }) - .await - .err() - } else { - cx.update(|cx| { - Workspace::new_local( - session_workspace.paths.paths().to_vec(), - app_state.clone(), - Some(window_handle), - None, - None, - OpenMode::Add, - cx, - ) - }) - .await - .err() - }; - - if let Some(error) = error { - errors.push(error); - } - } - - if let Some(target_id) = state.active_workspace_id { + if !project_group_keys.is_empty() { + let restored_keys: Vec = + project_group_keys.into_iter().map(Into::into).collect(); window_handle - .update(cx, |multi_workspace, window, cx| { - let target_index = multi_workspace - .workspaces() - .iter() - .position(|ws| ws.read(cx).database_id() == Some(target_id)); - let index = target_index.unwrap_or(0); - if let Some(workspace) = multi_workspace.workspaces().get(index).cloned() { - multi_workspace.activate(workspace, window, cx); - } - }) - .ok(); - } else { - window_handle - .update(cx, |multi_workspace, window, cx| { - if let Some(workspace) = multi_workspace.workspaces().first().cloned() { - multi_workspace.activate(workspace, window, cx); - } + .update(cx, |multi_workspace, _window, _cx| { + multi_workspace.restore_project_group_keys(restored_keys); }) .ok(); } - if state.sidebar_open { + if sidebar_open { window_handle .update(cx, |multi_workspace, _, cx| { multi_workspace.open_sidebar(cx); @@ -8706,8 +8693,7 @@ pub async fn restore_multiworkspace( .ok(); } - if let Some(sidebar_state) = &state.sidebar_state { - let sidebar_state = sidebar_state.clone(); + if let Some(sidebar_state) = 
sidebar_state { window_handle .update(cx, |multi_workspace, window, cx| { if let Some(sidebar) = multi_workspace.sidebar() { @@ -8724,10 +8710,7 @@ pub async fn restore_multiworkspace( }) .ok(); - Ok(MultiWorkspaceRestoreResult { - window_handle, - errors, - }) + Ok(window_handle) } actions!( @@ -9096,7 +9079,7 @@ pub fn workspace_windows_for_location( }; multi_workspace.read(cx).is_ok_and(|multi_workspace| { - multi_workspace.workspaces().iter().any(|workspace| { + multi_workspace.workspaces().any(|workspace| { match workspace.read(cx).workspace_location(cx) { WorkspaceLocation::Location(location, _) => { match (&location, serialized_location) { @@ -9212,6 +9195,7 @@ pub struct OpenOptions { pub requesting_window: Option>, pub open_mode: OpenMode, pub env: Option>, + pub open_in_dev_container: bool, } /// The result of opening a workspace via [`open_paths`], [`Workspace::new_local`], @@ -9341,7 +9325,7 @@ pub fn open_workspace_by_id( pub fn open_paths( abs_paths: &[PathBuf], app_state: Arc, - open_options: OpenOptions, + mut open_options: OpenOptions, cx: &mut App, ) -> Task> { let abs_paths = abs_paths.to_vec(); @@ -9366,10 +9350,9 @@ pub fn open_paths( let all_metadatas = futures::future::join_all(all_paths) .await .into_iter() - .filter_map(|result| result.ok().flatten()) - .collect::>(); + .filter_map(|result| result.ok().flatten()); - if all_metadatas.iter().all(|file| !file.is_dir) { + if all_metadatas.into_iter().all(|file| !file.is_dir) { cx.update(|cx| { let windows = workspace_windows_for_location( &SerializedWorkspaceLocation::Local, @@ -9391,12 +9374,46 @@ pub fn open_paths( } } + // Fallback for directories: when no flag is specified and no existing + // workspace matched, add the directory as a new workspace in the + // active window's MultiWorkspace (instead of opening a new window). 
+ if open_options.open_new_workspace.is_none() && existing.is_none() { + let target_window = cx.update(|cx| { + let windows = workspace_windows_for_location( + &SerializedWorkspaceLocation::Local, + cx, + ); + let window = cx + .active_window() + .and_then(|window| window.downcast::()) + .filter(|window| windows.contains(window)) + .or_else(|| windows.into_iter().next()); + window.filter(|window| { + window.read(cx).is_ok_and(|mw| mw.multi_workspace_enabled(cx)) + }) + }); + + if let Some(window) = target_window { + open_options.requesting_window = Some(window); + window + .update(cx, |multi_workspace, _, cx| { + multi_workspace.open_sidebar(cx); + }) + .log_err(); + } + } + + let open_in_dev_container = open_options.open_in_dev_container; + let result = if let Some((existing, target_workspace)) = existing { let open_task = existing .update(cx, |multi_workspace, window, cx| { window.activate_window(); multi_workspace.activate(target_workspace.clone(), window, cx); target_workspace.update(cx, |workspace, cx| { + if open_in_dev_container { + workspace.set_open_in_dev_container(true); + } workspace.open_paths( abs_paths, OpenOptions { @@ -9424,6 +9441,13 @@ pub fn open_paths( Ok(OpenResult { window: existing, workspace: target_workspace, opened_items: open_task }) } else { + let init = if open_in_dev_container { + Some(Box::new(|workspace: &mut Workspace, _window: &mut Window, _cx: &mut Context| { + workspace.set_open_in_dev_container(true); + }) as Box) + Send>) + } else { + None + }; let result = cx .update(move |cx| { Workspace::new_local( @@ -9431,7 +9455,7 @@ pub fn open_paths( app_state.clone(), open_options.requesting_window, open_options.env, - None, + init, open_options.open_mode, cx, ) @@ -10717,6 +10741,12 @@ mod tests { cx.add_window(|window, cx| MultiWorkspace::test_new(project_a.clone(), window, cx)); cx.run_until_parked(); + multi_workspace_handle + .update(cx, |mw, _window, cx| { + mw.open_sidebar(cx); + }) + .unwrap(); + let workspace_a = 
multi_workspace_handle .read_with(cx, |mw, _| mw.workspace().clone()) .unwrap(); @@ -10730,7 +10760,7 @@ mod tests { // Activate workspace A multi_workspace_handle .update(cx, |mw, window, cx| { - let workspace = mw.workspaces()[0].clone(); + let workspace = mw.workspaces().next().unwrap().clone(); mw.activate(workspace, window, cx); }) .unwrap(); @@ -10752,7 +10782,7 @@ mod tests { // Verify workspace A is active multi_workspace_handle .read_with(cx, |mw, _| { - assert_eq!(mw.active_workspace_index(), 0); + assert_eq!(mw.workspace(), &workspace_a); }) .unwrap(); @@ -10768,8 +10798,8 @@ mod tests { multi_workspace_handle .read_with(cx, |mw, _| { assert_eq!( - mw.active_workspace_index(), - 1, + mw.workspace(), + &workspace_b, "workspace B should be activated when it prompts" ); }) @@ -14487,6 +14517,12 @@ mod tests { cx.add_window(|window, cx| MultiWorkspace::test_new(project_a.clone(), window, cx)); cx.run_until_parked(); + multi_workspace_handle + .update(cx, |mw, _window, cx| { + mw.open_sidebar(cx); + }) + .unwrap(); + let workspace_a = multi_workspace_handle .read_with(cx, |mw, _| mw.workspace().clone()) .unwrap(); @@ -14500,7 +14536,7 @@ mod tests { // Switch to workspace A multi_workspace_handle .update(cx, |mw, window, cx| { - let workspace = mw.workspaces()[0].clone(); + let workspace = mw.workspaces().next().unwrap().clone(); mw.activate(workspace, window, cx); }) .unwrap(); @@ -14546,7 +14582,7 @@ mod tests { // Switch to workspace B multi_workspace_handle .update(cx, |mw, window, cx| { - let workspace = mw.workspaces()[1].clone(); + let workspace = mw.workspaces().nth(1).unwrap().clone(); mw.activate(workspace, window, cx); }) .unwrap(); @@ -14555,7 +14591,7 @@ mod tests { // Switch back to workspace A multi_workspace_handle .update(cx, |mw, window, cx| { - let workspace = mw.workspaces()[0].clone(); + let workspace = mw.workspaces().next().unwrap().clone(); mw.activate(workspace, window, cx); }) .unwrap(); diff --git 
a/crates/workspace/src/workspace_settings.rs b/crates/workspace/src/workspace_settings.rs index d78b233229800b571ccc37f87719d09125f1c4c3..ee0e80336d744cadaecdf0201525deddb8d5eec9 100644 --- a/crates/workspace/src/workspace_settings.rs +++ b/crates/workspace/src/workspace_settings.rs @@ -1,4 +1,4 @@ -use std::num::NonZeroUsize; +use std::{num::NonZeroUsize, time::Duration}; use crate::DockPosition; use collections::HashMap; @@ -35,6 +35,13 @@ pub struct WorkspaceSettings { pub use_system_window_tabs: bool, pub zoomed_padding: bool, pub window_decorations: settings::WindowDecorations, + pub focus_follows_mouse: FocusFollowsMouse, +} + +#[derive(Copy, Clone, Deserialize)] +pub struct FocusFollowsMouse { + pub enabled: bool, + pub debounce: Duration, } #[derive(Copy, Clone, PartialEq, Debug, Default)] @@ -113,6 +120,20 @@ impl Settings for WorkspaceSettings { use_system_window_tabs: workspace.use_system_window_tabs.unwrap(), zoomed_padding: workspace.zoomed_padding.unwrap(), window_decorations: workspace.window_decorations.unwrap(), + focus_follows_mouse: FocusFollowsMouse { + enabled: workspace + .focus_follows_mouse + .unwrap() + .enabled + .unwrap_or(false), + debounce: Duration::from_millis( + workspace + .focus_follows_mouse + .unwrap() + .debounce_ms + .unwrap_or(250), + ), + }, } } } diff --git a/crates/worktree/src/worktree.rs b/crates/worktree/src/worktree.rs index b08f9aaee016d7047b06bf9ac4a4a1ce2b2d1ad8..864858073db70c984e61dbf43bf98be44f6c1c58 100644 --- a/crates/worktree/src/worktree.rs +++ b/crates/worktree/src/worktree.rs @@ -176,6 +176,7 @@ pub struct Snapshot { root_char_bag: CharBag, entries_by_path: SumTree, entries_by_id: SumTree, + root_repo_common_dir: Option>, always_included_entries: Vec>, /// A number that increases every time the worktree begins scanning @@ -368,6 +369,7 @@ struct UpdateObservationState { pub enum Event { UpdatedEntries(UpdatedEntriesSet), UpdatedGitRepositories(UpdatedGitRepositoriesSet), + UpdatedRootRepoCommonDir, 
DeletedEntry(ProjectEntryId), /// The worktree root itself has been deleted (for single-file worktrees) Deleted, @@ -407,6 +409,10 @@ impl Worktree { None }; + let root_repo_common_dir = discover_root_repo_common_dir(&abs_path, fs.as_ref()) + .await + .map(SanitizedPath::from_arc); + Ok(cx.new(move |cx: &mut Context| { let mut snapshot = LocalSnapshot { ignores_by_parent_abs_path: Default::default(), @@ -426,6 +432,7 @@ impl Worktree { ), root_file_handle, }; + snapshot.root_repo_common_dir = root_repo_common_dir; let worktree_id = snapshot.id(); let settings_location = Some(SettingsLocation { @@ -564,6 +571,7 @@ impl Worktree { this.update(cx, |this, cx| { let mut entries_changed = false; let this = this.as_remote_mut().unwrap(); + let old_root_repo_common_dir = this.snapshot.root_repo_common_dir.clone(); { let mut lock = this.background_snapshot.lock(); this.snapshot = lock.0.clone(); @@ -579,6 +587,9 @@ impl Worktree { if entries_changed { cx.emit(Event::UpdatedEntries(Arc::default())); } + if this.snapshot.root_repo_common_dir != old_root_repo_common_dir { + cx.emit(Event::UpdatedRootRepoCommonDir); + } cx.notify(); while let Some((scan_id, _)) = this.snapshot_subscriptions.front() { if this.observed_snapshot(*scan_id) { @@ -1183,6 +1194,13 @@ impl LocalWorktree { cx: &mut Context, ) { let repo_changes = self.changed_repos(&self.snapshot, &mut new_snapshot); + + new_snapshot.root_repo_common_dir = new_snapshot + .local_repo_for_work_directory_path(RelPath::empty()) + .map(|repo| SanitizedPath::from_arc(repo.common_dir_abs_path.clone())); + + let root_repo_common_dir_changed = + self.snapshot.root_repo_common_dir != new_snapshot.root_repo_common_dir; self.snapshot = new_snapshot; if let Some(share) = self.update_observer.as_mut() { @@ -1198,6 +1216,9 @@ impl LocalWorktree { if !repo_changes.is_empty() { cx.emit(Event::UpdatedGitRepositories(repo_changes)); } + if root_repo_common_dir_changed { + cx.emit(Event::UpdatedRootRepoCommonDir); + } while let 
Some((scan_id, _)) = self.snapshot_subscriptions.front() { if self.snapshot.completed_scan_id >= *scan_id { @@ -2216,6 +2237,7 @@ impl Snapshot { always_included_entries: Default::default(), entries_by_path: Default::default(), entries_by_id: Default::default(), + root_repo_common_dir: None, scan_id: 1, completed_scan_id: 0, } @@ -2241,6 +2263,12 @@ impl Snapshot { SanitizedPath::cast_arc_ref(&self.abs_path) } + pub fn root_repo_common_dir(&self) -> Option<&Arc> { + self.root_repo_common_dir + .as_ref() + .map(SanitizedPath::cast_arc_ref) + } + fn build_initial_update(&self, project_id: u64, worktree_id: u64) -> proto::UpdateWorktree { let mut updated_entries = self .entries_by_path @@ -2254,6 +2282,9 @@ impl Snapshot { worktree_id, abs_path: self.abs_path().to_string_lossy().into_owned(), root_name: self.root_name().to_proto(), + root_repo_common_dir: self + .root_repo_common_dir() + .map(|p| p.to_string_lossy().into_owned()), updated_entries, removed_entries: Vec::new(), scan_id: self.scan_id as u64, @@ -2399,6 +2430,10 @@ impl Snapshot { self.entries_by_path.edit(entries_by_path_edits, ()); self.entries_by_id.edit(entries_by_id_edits, ()); + self.root_repo_common_dir = update + .root_repo_common_dir + .map(|p| SanitizedPath::new_arc(Path::new(&p))); + self.scan_id = update.scan_id as usize; if update.is_last_update { self.completed_scan_id = update.scan_id as usize; @@ -2627,6 +2662,9 @@ impl LocalSnapshot { worktree_id, abs_path: self.abs_path().to_string_lossy().into_owned(), root_name: self.root_name().to_proto(), + root_repo_common_dir: self + .root_repo_common_dir() + .map(|p| p.to_string_lossy().into_owned()), updated_entries, removed_entries, scan_id: self.scan_id as u64, @@ -6071,6 +6109,16 @@ fn parse_gitfile(content: &str) -> anyhow::Result<&Path> { Ok(Path::new(path.trim())) } +async fn discover_root_repo_common_dir(root_abs_path: &Path, fs: &dyn Fs) -> Option> { + let root_dot_git = root_abs_path.join(DOT_GIT); + if 
!fs.metadata(&root_dot_git).await.is_ok_and(|m| m.is_some()) { + return None; + } + let dot_git_path: Arc = root_dot_git.into(); + let (_, common_dir) = discover_git_paths(&dot_git_path, fs).await; + Some(common_dir) +} + async fn discover_git_paths(dot_git_abs_path: &Arc, fs: &dyn Fs) -> (Arc, Arc) { let mut repository_dir_abs_path = dot_git_abs_path.clone(); let mut common_dir_abs_path = dot_git_abs_path.clone(); diff --git a/crates/worktree/tests/integration/main.rs b/crates/worktree/tests/integration/main.rs index cd7dd1c9056a7d501bec2bcd7b07d596f689a908..b8d1994b1dc3f8ddbd482dd0863e3441ab7adc64 100644 --- a/crates/worktree/tests/integration/main.rs +++ b/crates/worktree/tests/integration/main.rs @@ -2736,6 +2736,97 @@ fn check_worktree_entries( } } +#[gpui::test] +async fn test_root_repo_common_dir(executor: BackgroundExecutor, cx: &mut TestAppContext) { + init_test(cx); + + use git::repository::Worktree as GitWorktree; + + let fs = FakeFs::new(executor); + + // Set up a main repo and a linked worktree pointing back to it. + fs.insert_tree( + path!("/main_repo"), + json!({ + ".git": {}, + "file.txt": "content", + }), + ) + .await; + fs.add_linked_worktree_for_repo( + Path::new(path!("/main_repo/.git")), + false, + GitWorktree { + path: PathBuf::from(path!("/linked_worktree")), + ref_name: Some("refs/heads/feature".into()), + sha: "abc123".into(), + is_main: false, + }, + ) + .await; + fs.write( + path!("/linked_worktree/file.txt").as_ref(), + "content".as_bytes(), + ) + .await + .unwrap(); + + let tree = Worktree::local( + path!("/linked_worktree").as_ref(), + true, + fs.clone(), + Arc::default(), + true, + WorktreeId::from_proto(0), + &mut cx.to_async(), + ) + .await + .unwrap(); + tree.update(cx, |tree, _| tree.as_local().unwrap().scan_complete()) + .await; + cx.run_until_parked(); + + // For a linked worktree, root_repo_common_dir should point to the + // main repo's .git, not the worktree-specific git directory. 
+ tree.read_with(cx, |tree, _| { + assert_eq!( + tree.snapshot().root_repo_common_dir().map(|p| p.as_ref()), + Some(Path::new(path!("/main_repo/.git"))), + ); + }); + + let event_count: Rc> = Rc::new(Cell::new(0)); + tree.update(cx, { + let event_count = event_count.clone(); + |_, cx| { + cx.subscribe(&cx.entity(), move |_, _, event, _| { + if matches!(event, Event::UpdatedRootRepoCommonDir) { + event_count.set(event_count.get() + 1); + } + }) + .detach(); + } + }); + + // Remove .git — root_repo_common_dir should become None. + fs.remove_file( + &PathBuf::from(path!("/linked_worktree/.git")), + Default::default(), + ) + .await + .unwrap(); + tree.flush_fs_events(cx).await; + + tree.read_with(cx, |tree, _| { + assert_eq!(tree.snapshot().root_repo_common_dir(), None); + }); + assert_eq!( + event_count.get(), + 1, + "should have emitted UpdatedRootRepoCommonDir on removal" + ); +} + fn init_test(cx: &mut gpui::TestAppContext) { zlog::init_test(); diff --git a/crates/zed/src/main.rs b/crates/zed/src/main.rs index 303f21b8ffa62f9d9f380d9c18beecd77775df20..5937b91665b892084aa7b4d1f8b94ec1e2d864da 100644 --- a/crates/zed/src/main.rs +++ b/crates/zed/src/main.rs @@ -10,7 +10,7 @@ use agent_ui::AgentPanel; use anyhow::{Context as _, Error, Result}; use clap::Parser; use cli::FORCE_CLI_MODE_ENV_VAR_NAME; -use client::{Client, ProxySettings, UserStore, parse_zed_link}; +use client::{Client, ProxySettings, RefreshLlmTokenListener, UserStore, parse_zed_link}; use collab_ui::channel_view::ChannelView; use collections::HashMap; use crashes::InitCrashHandler; @@ -664,7 +664,12 @@ fn main() { ); copilot_ui::init(&app_state, cx); - language_model::init(app_state.user_store.clone(), app_state.client.clone(), cx); + language_model::init(cx); + RefreshLlmTokenListener::register( + app_state.client.clone(), + app_state.user_store.clone(), + cx, + ); language_models::init(app_state.user_store.clone(), app_state.client.clone(), cx); acp_tools::init(cx); zed::telemetry_log::init(cx); @@ 
-857,13 +862,13 @@ fn main() { diff_paths, wsl, diff_all: diff_all_mode, + dev_container: args.dev_container, }) } match open_rx - .try_next() + .try_recv() .ok() - .flatten() .and_then(|request| OpenRequest::parse(request, cx).log_err()) { Some(request) => { @@ -1208,6 +1213,7 @@ fn handle_open_request(request: OpenRequest, app_state: Arc, cx: &mut } let mut task = None; + let dev_container = request.dev_container; if !request.open_paths.is_empty() || !request.diff_paths.is_empty() { let app_state = app_state.clone(); task = Some(cx.spawn(async move |cx| { @@ -1218,7 +1224,10 @@ fn handle_open_request(request: OpenRequest, app_state: Arc, cx: &mut &request.diff_paths, request.diff_all, app_state, - workspace::OpenOptions::default(), + workspace::OpenOptions { + open_in_dev_container: dev_container, + ..Default::default() + }, cx, ) .await?; @@ -1354,16 +1363,10 @@ pub(crate) async fn restore_or_create_workspace( let mut tasks = Vec::new(); for multi_workspace in multi_workspaces { - match restore_multiworkspace(multi_workspace, app_state.clone(), cx).await { - Ok(result) => { - for error in result.errors { - log::error!("Failed to restore workspace in group: {error:#}"); - results.push(Err(error)); - } - } - Err(e) => { - results.push(Err(e)); - } + if let Err(error) = restore_multiworkspace(multi_workspace, app_state.clone(), cx).await + { + log::error!("Failed to restore workspace: {error:#}"); + results.push(Err(error)); } } @@ -1636,6 +1639,13 @@ struct Args { #[arg(long, value_name = "USER@DISTRO")] wsl: Option, + /// Open the project in a dev container. + /// + /// Automatically triggers "Reopen in Dev Container" if a `.devcontainer/` + /// configuration is found in the project directory. + #[arg(long)] + dev_container: bool, + /// Instructs zed to run as a dev server on this machine. 
(not implemented) #[arg(long)] dev_server_token: Option, diff --git a/crates/zed/src/visual_test_runner.rs b/crates/zed/src/visual_test_runner.rs index dcd2ec81ee44a091c9b444e79177145cf5cfceb7..b1082fadd089323c77ac5197b75653fec23863c8 100644 --- a/crates/zed/src/visual_test_runner.rs +++ b/crates/zed/src/visual_test_runner.rs @@ -109,7 +109,7 @@ use { image::RgbaImage, project::{AgentId, Project}, project_panel::ProjectPanel, - settings::{NotifyWhenAgentWaiting, Settings as _}, + settings::{NotifyWhenAgentWaiting, PlaySoundWhenAgentDone, Settings as _}, settings_ui::SettingsWindow, std::{ any::Any, @@ -201,7 +201,12 @@ fn run_visual_tests(project_path: PathBuf, update_baseline: bool) -> Result<()> }); prompt_store::init(cx); let prompt_builder = prompt_store::PromptBuilder::load(app_state.fs.clone(), false, cx); - language_model::init(app_state.user_store.clone(), app_state.client.clone(), cx); + language_model::init(cx); + client::RefreshLlmTokenListener::register( + app_state.client.clone(), + app_state.user_store.clone(), + cx, + ); language_models::init(app_state.user_store.clone(), app_state.client.clone(), cx); git_ui::init(cx); project::AgentRegistryStore::init_global( @@ -231,7 +236,7 @@ fn run_visual_tests(project_path: PathBuf, update_baseline: bool) -> Result<()> agent_settings::AgentSettings::override_global( agent_settings::AgentSettings { notify_when_agent_waiting: NotifyWhenAgentWaiting::Never, - play_sound_when_agent_done: false, + play_sound_when_agent_done: PlaySoundWhenAgentDone::Never, ..agent_settings::AgentSettings::get_global(cx).clone() }, cx, @@ -2087,7 +2092,7 @@ fn run_agent_thread_view_test( let mut tool_content: Vec = Vec::new(); let mut tool_locations: Vec = Vec::new(); - while let Ok(Some(event)) = event_receiver.try_next() { + while let Ok(event) = event_receiver.try_recv() { if let Ok(agent::ThreadEvent::ToolCallUpdate(acp_thread::ToolCallUpdate::UpdateFields( update, ))) = event @@ -2622,7 +2627,7 @@ fn 
run_multi_workspace_sidebar_visual_tests( // Add worktree to workspace 1 (index 0) so it shows as "private-test-remote" let add_worktree1_task = multi_workspace_window .update(cx, |multi_workspace, _window, cx| { - let workspace1 = &multi_workspace.workspaces()[0]; + let workspace1 = multi_workspace.workspaces().next().unwrap(); let project = workspace1.read(cx).project().clone(); project.update(cx, |project, cx| { project.find_or_create_worktree(&workspace1_dir, true, cx) @@ -2641,7 +2646,7 @@ fn run_multi_workspace_sidebar_visual_tests( // Add worktree to workspace 2 (index 1) so it shows as "zed" let add_worktree2_task = multi_workspace_window .update(cx, |multi_workspace, _window, cx| { - let workspace2 = &multi_workspace.workspaces()[1]; + let workspace2 = multi_workspace.workspaces().nth(1).unwrap(); let project = workspace2.read(cx).project().clone(); project.update(cx, |project, cx| { project.find_or_create_worktree(&workspace2_dir, true, cx) @@ -2660,7 +2665,7 @@ fn run_multi_workspace_sidebar_visual_tests( // Switch to workspace 1 so it's highlighted as active (index 0) multi_workspace_window .update(cx, |multi_workspace, window, cx| { - let workspace = multi_workspace.workspaces()[0].clone(); + let workspace = multi_workspace.workspaces().next().unwrap().clone(); multi_workspace.activate(workspace, window, cx); }) .context("Failed to activate workspace 1")?; @@ -2688,7 +2693,7 @@ fn run_multi_workspace_sidebar_visual_tests( let save_tasks = multi_workspace_window .update(cx, |multi_workspace, _window, cx| { let thread_store = agent::ThreadStore::global(cx); - let workspaces = multi_workspace.workspaces().to_vec(); + let workspaces: Vec<_> = multi_workspace.workspaces().cloned().collect(); let mut tasks = Vec::new(); for (index, workspace) in workspaces.iter().enumerate() { @@ -3500,7 +3505,7 @@ edition = "2021" // Add the git project as a worktree let add_worktree_task = workspace_window .update(cx, |multi_workspace, _window, cx| { - let workspace = 
&multi_workspace.workspaces()[0]; + let workspace = multi_workspace.workspaces().next().unwrap(); let project = workspace.read(cx).project().clone(); project.update(cx, |project, cx| { project.find_or_create_worktree(&project_path, true, cx) @@ -3525,7 +3530,7 @@ edition = "2021" // Open the project panel let (weak_workspace, async_window_cx) = workspace_window .update(cx, |multi_workspace, window, cx| { - let workspace = &multi_workspace.workspaces()[0]; + let workspace = multi_workspace.workspaces().next().unwrap(); (workspace.read(cx).weak_handle(), window.to_async(cx)) }) .context("Failed to get workspace handle")?; @@ -3539,7 +3544,7 @@ edition = "2021" workspace_window .update(cx, |multi_workspace, window, cx| { - let workspace = &multi_workspace.workspaces()[0]; + let workspace = multi_workspace.workspaces().next().unwrap(); workspace.update(cx, |workspace, cx| { workspace.add_panel(project_panel, window, cx); workspace.open_panel::(window, cx); @@ -3552,7 +3557,7 @@ edition = "2021" // Open main.rs in the editor let open_file_task = workspace_window .update(cx, |multi_workspace, window, cx| { - let workspace = &multi_workspace.workspaces()[0]; + let workspace = multi_workspace.workspaces().next().unwrap(); workspace.update(cx, |workspace, cx| { let worktree = workspace.project().read(cx).worktrees(cx).next(); if let Some(worktree) = worktree { @@ -3580,7 +3585,7 @@ edition = "2021" // Load the AgentPanel let (weak_workspace, async_window_cx) = workspace_window .update(cx, |multi_workspace, window, cx| { - let workspace = &multi_workspace.workspaces()[0]; + let workspace = multi_workspace.workspaces().next().unwrap(); (workspace.read(cx).weak_handle(), window.to_async(cx)) }) .context("Failed to get workspace handle for agent panel")?; @@ -3624,7 +3629,7 @@ edition = "2021" workspace_window .update(cx, |multi_workspace, window, cx| { - let workspace = &multi_workspace.workspaces()[0]; + let workspace = multi_workspace.workspaces().next().unwrap(); 
workspace.update(cx, |workspace, cx| { workspace.add_panel(panel.clone(), window, cx); workspace.open_panel::(window, cx); @@ -3801,7 +3806,7 @@ edition = "2021" .is_none() }); let workspace_count = workspace_window.update(cx, |multi_workspace, _window, _cx| { - multi_workspace.workspaces().len() + multi_workspace.workspaces().count() })?; if workspace_count == 2 && status_cleared { creation_complete = true; @@ -3820,7 +3825,7 @@ edition = "2021" // error state by injecting the stub server, and shrink the panel so the // editor content is visible. workspace_window.update(cx, |multi_workspace, window, cx| { - let new_workspace = &multi_workspace.workspaces()[1]; + let new_workspace = multi_workspace.workspaces().nth(1).unwrap(); new_workspace.update(cx, |workspace, cx| { if let Some(new_panel) = workspace.panel::(cx) { new_panel.update(cx, |panel, cx| { @@ -3833,7 +3838,7 @@ edition = "2021" // Type and send a message so the thread target dropdown disappears. let new_panel = workspace_window.update(cx, |multi_workspace, _window, cx| { - let new_workspace = &multi_workspace.workspaces()[1]; + let new_workspace = multi_workspace.workspaces().nth(1).unwrap(); new_workspace.read(cx).panel::(cx) })?; if let Some(new_panel) = new_panel { @@ -3874,7 +3879,7 @@ edition = "2021" workspace_window .update(cx, |multi_workspace, _window, cx| { - let workspace = &multi_workspace.workspaces()[0]; + let workspace = multi_workspace.workspaces().next().unwrap(); let project = workspace.read(cx).project().clone(); project.update(cx, |project, cx| { let worktree_ids: Vec<_> = diff --git a/crates/zed/src/zed.rs b/crates/zed/src/zed.rs index 01e2354849f3a70399c680c44bd1a3cfbeb64dc4..03e128415e1aa8390d1b95816755d3644064dada 100644 --- a/crates/zed/src/zed.rs +++ b/crates/zed/src/zed.rs @@ -33,10 +33,11 @@ use git_ui::commit_view::CommitViewToolbar; use git_ui::git_panel::GitPanel; use git_ui::project_diff::{BranchDiffToolbar, ProjectDiffToolbar}; use gpui::{ - Action, App, AppContext as 
_, AsyncWindowContext, Context, DismissEvent, Element, Entity, - Focusable, KeyBinding, ParentElement, PathPromptOptions, PromptLevel, ReadGlobal, SharedString, - Task, TitlebarOptions, UpdateGlobal, WeakEntity, Window, WindowHandle, WindowKind, - WindowOptions, actions, image_cache, point, px, retain_all, + Action, App, AppContext as _, AsyncWindowContext, ClipboardItem, Context, DismissEvent, + Element, Entity, FocusHandle, Focusable, Image, ImageFormat, KeyBinding, ParentElement, + PathPromptOptions, PromptLevel, ReadGlobal, SharedString, Size, Task, TitlebarOptions, + UpdateGlobal, WeakEntity, Window, WindowBounds, WindowHandle, WindowKind, WindowOptions, + actions, image_cache, img, point, px, retain_all, }; use image_viewer::ImageInfo; use language::Capability; @@ -78,7 +79,7 @@ use std::{ use terminal_view::terminal_panel::{self, TerminalPanel}; use theme::{ActiveTheme, SystemAppearance, ThemeRegistry, deserialize_icon_theme}; use theme_settings::{ThemeSettings, load_user_theme}; -use ui::{PopoverMenuHandle, prelude::*}; +use ui::{Navigable, NavigableEntry, PopoverMenuHandle, TintColor, prelude::*}; use util::markdown::MarkdownString; use util::rel_path::RelPath; use util::{ResultExt, asset_str, maybe}; @@ -96,8 +97,8 @@ use workspace::{ }; use workspace::{Pane, notifications::DetachAndPromptErr}; use zed_actions::{ - OpenAccountSettings, OpenBrowser, OpenDocs, OpenServerSettings, OpenSettingsFile, OpenZedUrl, - Quit, + About, OpenAccountSettings, OpenBrowser, OpenDocs, OpenServerSettings, OpenSettingsFile, + OpenZedUrl, Quit, }; actions!( @@ -277,10 +278,8 @@ pub fn init(cx: &mut App) { ); }); }) - .on_action(|_: &zed_actions::About, cx| { - with_active_or_new_workspace(cx, |workspace, window, cx| { - about(workspace, window, cx); - }); + .on_action(|_: &About, cx| { + open_about_window(cx); }); } @@ -503,12 +502,15 @@ pub fn initialize_workspace(app_state: Arc, cx: &mut App) { cx.new(|_| go_to_line::cursor_position::CursorPosition::new(workspace)); let 
line_ending_indicator = cx.new(|_| line_ending_selector::LineEndingIndicator::default()); + let merge_conflict_indicator = + cx.new(|cx| git_ui::MergeConflictIndicator::new(workspace, cx)); workspace.status_bar().update(cx, |status_bar, cx| { status_bar.add_left_item(search_button, window, cx); status_bar.add_left_item(lsp_button, window, cx); status_bar.add_left_item(diagnostic_summary, window, cx); status_bar.add_left_item(active_file_name, window, cx); status_bar.add_left_item(activity_indicator, window, cx); + status_bar.add_left_item(merge_conflict_indicator, window, cx); status_bar.add_right_item(edit_prediction_ui, window, cx); status_bar.add_right_item(active_buffer_encoding, window, cx); status_bar.add_right_item(active_buffer_language, window, cx); @@ -1249,44 +1251,218 @@ fn initialize_pane( }); } -fn about(_: &mut Workspace, window: &mut Window, cx: &mut Context) { - use std::fmt::Write; - let release_channel = ReleaseChannel::global(cx).display_name(); - let full_version = AppVersion::global(cx); - let version = env!("CARGO_PKG_VERSION"); - let debug = if cfg!(debug_assertions) { - "(debug)" - } else { - "" - }; - let message = format!("{release_channel} {version} {debug}"); +fn open_about_window(cx: &mut App) { + fn about_window_icon(release_channel: ReleaseChannel) -> Arc { + let bytes = match release_channel { + ReleaseChannel::Dev => include_bytes!("../resources/app-icon-dev.png").as_slice(), + ReleaseChannel::Nightly => { + include_bytes!("../resources/app-icon-nightly.png").as_slice() + } + ReleaseChannel::Preview => { + include_bytes!("../resources/app-icon-preview.png").as_slice() + } + ReleaseChannel::Stable => include_bytes!("../resources/app-icon.png").as_slice(), + }; - let mut detail = AppCommitSha::try_global(cx) - .map(|sha| sha.full()) - .unwrap_or_default(); - if !detail.is_empty() { - detail.push('\n'); + Arc::new(Image::from_bytes(ImageFormat::Png, bytes.to_vec())) } - _ = write!(&mut detail, "\n{full_version}"); - let detail = 
Some(detail); + struct AboutWindow { + focus_handle: FocusHandle, + ok_entry: NavigableEntry, + copy_entry: NavigableEntry, + app_icon: Arc, + message: SharedString, + commit: Option, + full_version: SharedString, + } - let prompt = window.prompt( - PromptLevel::Info, - &message, - detail.as_deref(), - &["Copy", "OK"], - cx, - ); - cx.spawn(async move |_, cx| { - if let Ok(0) = prompt.await { - let content = format!("{}\n{}", message, detail.as_deref().unwrap_or("")); - cx.update(|cx| { - cx.write_to_clipboard(gpui::ClipboardItem::new_string(content)); - }); + impl AboutWindow { + fn new(cx: &mut Context) -> Self { + let release_channel = ReleaseChannel::global(cx); + let release_channel_name = release_channel.display_name(); + let full_version: SharedString = AppVersion::global(cx).to_string().into(); + let version = env!("CARGO_PKG_VERSION"); + + let debug = if cfg!(debug_assertions) { + "(debug)" + } else { + "" + }; + let message: SharedString = format!("{release_channel_name} {version} {debug}").into(); + let commit = AppCommitSha::try_global(cx) + .map(|sha| sha.full()) + .filter(|commit| !commit.is_empty()) + .map(SharedString::from); + + Self { + focus_handle: cx.focus_handle(), + ok_entry: NavigableEntry::focusable(cx), + copy_entry: NavigableEntry::focusable(cx), + app_icon: about_window_icon(release_channel), + message, + commit, + full_version, + } } - }) - .detach(); + + fn copy_details(&self, window: &mut Window, cx: &mut Context) { + let content = match self.commit.as_ref() { + Some(commit) => { + format!( + "{}\nCommit: {}\nVersion: {}", + self.message, commit, self.full_version + ) + } + None => format!("{}\nVersion: {}", self.message, self.full_version), + }; + cx.write_to_clipboard(ClipboardItem::new_string(content)); + window.remove_window(); + } + } + + impl Render for AboutWindow { + fn render(&mut self, window: &mut Window, cx: &mut Context) -> impl IntoElement { + let ok_is_focused = self.ok_entry.focus_handle.contains_focused(window, cx); + 
let copy_is_focused = self.copy_entry.focus_handle.contains_focused(window, cx); + + Navigable::new( + v_flex() + .id("about-window") + .track_focus(&self.focus_handle) + .on_action(cx.listener(|_, _: &menu::Cancel, window, _cx| { + window.remove_window(); + })) + .min_w_0() + .size_full() + .bg(cx.theme().colors().editor_background) + .text_color(cx.theme().colors().text) + .p_4() + .when(cfg!(target_os = "macos"), |this| this.pt_10()) + .gap_4() + .text_center() + .justify_between() + .child( + v_flex() + .w_full() + .gap_2() + .items_center() + .child(img(self.app_icon.clone()).size_16().flex_none()) + .child(Headline::new(self.message.clone())) + .when_some(self.commit.clone(), |this, commit| { + this.child( + Label::new("Commit") + .color(Color::Muted) + .size(LabelSize::XSmall), + ) + .child(Label::new(commit).size(LabelSize::Small)) + }) + .child( + Label::new("Version") + .color(Color::Muted) + .size(LabelSize::XSmall), + ) + .child(Label::new(self.full_version.clone()).size(LabelSize::Small)), + ) + .child( + h_flex() + .w_full() + .gap_1() + .child( + div() + .flex_1() + .track_focus(&self.ok_entry.focus_handle) + .on_action(cx.listener(|_, _: &menu::Confirm, window, _cx| { + window.remove_window(); + })) + .child( + Button::new("ok", "Ok") + .full_width() + .style(ButtonStyle::OutlinedGhost) + .toggle_state(ok_is_focused) + .selected_style(ButtonStyle::Tinted(TintColor::Accent)) + .on_click(cx.listener(|_, _, window, _cx| { + window.remove_window(); + })), + ), + ) + .child( + div() + .flex_1() + .track_focus(&self.copy_entry.focus_handle) + .on_action(cx.listener( + |this, _: &menu::Confirm, window, cx| { + this.copy_details(window, cx); + }, + )) + .child( + Button::new("copy", "Copy") + .full_width() + .style(ButtonStyle::Tinted(TintColor::Accent)) + .toggle_state(copy_is_focused) + .selected_style(ButtonStyle::Tinted(TintColor::Accent)) + .on_click(cx.listener(|this, _event, window, cx| { + this.copy_details(window, cx); + })), + ), + ), + ) + 
.into_any_element(), + ) + .entry(self.ok_entry.clone()) + .entry(self.copy_entry.clone()) + } + } + + impl Focusable for AboutWindow { + fn focus_handle(&self, _cx: &App) -> FocusHandle { + self.ok_entry.focus_handle.clone() + } + } + + // Don't open about window twice + if let Some(existing) = cx + .windows() + .into_iter() + .find_map(|w| w.downcast::()) + { + existing + .update(cx, |about_window, window, cx| { + window.activate_window(); + about_window.ok_entry.focus_handle.focus(window, cx); + }) + .log_err(); + return; + } + + let window_size = Size { + width: px(440.), + height: px(300.), + }; + + cx.open_window( + WindowOptions { + titlebar: Some(TitlebarOptions { + title: Some("About Zed".into()), + appears_transparent: true, + traffic_light_position: Some(point(px(12.), px(12.))), + }), + window_bounds: Some(WindowBounds::centered(window_size, cx)), + is_resizable: false, + is_minimizable: false, + kind: WindowKind::Normal, + app_id: Some(ReleaseChannel::global(cx).app_id().to_owned()), + ..Default::default() + }, + |window, cx| { + let about_window = cx.new(AboutWindow::new); + let focus_handle = about_window.read(cx).ok_entry.focus_handle.clone(); + window.activate_window(); + focus_handle.focus(window, cx); + about_window + }, + ) + .log_err(); } #[cfg(not(target_os = "windows"))] @@ -1348,7 +1524,7 @@ fn quit(_: &Quit, cx: &mut App) { let window = *window; let workspaces = window .update(cx, |multi_workspace, _, _| { - multi_workspace.workspaces().to_vec() + multi_workspace.workspaces().cloned().collect::>() }) .log_err(); @@ -2282,7 +2458,6 @@ mod tests { .update(cx, |multi_workspace, window, cx| { let mut tasks = multi_workspace .workspaces() - .iter() .map(|workspace| { workspace.update(cx, |workspace, cx| { workspace.flush_serialization(window, cx) @@ -2430,18 +2605,33 @@ mod tests { }) .await .unwrap(); - assert_eq!(cx.read(|cx| cx.windows().len()), 2); - - // Replace existing windows - let window = cx - .update(|cx| cx.windows()[0].downcast::()) 
+ assert_eq!(cx.read(|cx| cx.windows().len()), 1); + cx.run_until_parked(); + multi_workspace_1 + .update(cx, |multi_workspace, _window, cx| { + assert_eq!(multi_workspace.workspaces().count(), 2); + assert!(multi_workspace.sidebar_open()); + let workspace = multi_workspace.workspace().read(cx); + assert_eq!( + workspace + .worktrees(cx) + .map(|w| w.read(cx).abs_path()) + .collect::>(), + &[ + Path::new(path!("/root/c")).into(), + Path::new(path!("/root/d")).into(), + ] + ); + }) .unwrap(); + + // Opening with -n (open_new_workspace: Some(true)) still creates a new window. cx.update(|cx| { open_paths( &[PathBuf::from(path!("/root/e"))], app_state, workspace::OpenOptions { - requesting_window: Some(window), + open_new_workspace: Some(true), ..Default::default() }, cx, @@ -2451,23 +2641,6 @@ mod tests { .unwrap(); cx.background_executor.run_until_parked(); assert_eq!(cx.read(|cx| cx.windows().len()), 2); - let multi_workspace_1 = cx - .update(|cx| cx.windows()[0].downcast::()) - .unwrap(); - multi_workspace_1 - .update(cx, |multi_workspace, window, cx| { - let workspace = multi_workspace.workspace().read(cx); - assert_eq!( - workspace - .worktrees(cx) - .map(|w| w.read(cx).abs_path()) - .collect::>(), - &[Path::new(path!("/root/e")).into()] - ); - assert!(workspace.right_dock().read(cx).is_open()); - assert!(workspace.active_pane().focus_handle(cx).is_focused(window)); - }) - .unwrap(); } #[gpui::test] @@ -2548,7 +2721,6 @@ mod tests { .await .unwrap(); assert_eq!(cx.update(|cx| cx.windows().len()), 1); - let window1 = cx.update(|cx| cx.active_window().unwrap()); cx.update(|cx| { open_paths( @@ -2562,6 +2734,8 @@ mod tests { .unwrap(); assert_eq!(cx.update(|cx| cx.windows().len()), 1); + // Opening a directory with default options adds to the existing window + // rather than creating a new one. 
cx.update(|cx| { open_paths( &[PathBuf::from(path!("/root/dir2"))], @@ -2572,25 +2746,23 @@ mod tests { }) .await .unwrap(); - assert_eq!(cx.update(|cx| cx.windows().len()), 2); - let window2 = cx.update(|cx| cx.active_window().unwrap()); - assert!(window1 != window2); - cx.update_window(window1, |_, window, _| window.activate_window()) - .unwrap(); + assert_eq!(cx.update(|cx| cx.windows().len()), 1); + // Opening a directory with -n creates a new window. cx.update(|cx| { open_paths( - &[PathBuf::from(path!("/root/dir2/c"))], + &[PathBuf::from(path!("/root/dir2"))], app_state.clone(), - workspace::OpenOptions::default(), + workspace::OpenOptions { + open_new_workspace: Some(true), + ..Default::default() + }, cx, ) }) .await .unwrap(); assert_eq!(cx.update(|cx| cx.windows().len()), 2); - // should have opened in window2 because that has dir2 visibly open (window1 has it open, but not in the project panel) - assert!(cx.update(|cx| cx.active_window().unwrap()) == window2); } #[gpui::test] @@ -4994,6 +5166,7 @@ mod tests { app_state.languages.add(markdown_lang()); gpui_tokio::init(cx); + AppState::set_global(app_state.clone(), cx); theme_settings::init(theme::LoadThemes::JustBase, cx); audio::init(cx); channel::init(&app_state.client, app_state.user_store.clone(), cx); @@ -5015,7 +5188,12 @@ mod tests { cx, ); image_viewer::init(cx); - language_model::init(app_state.user_store.clone(), app_state.client.clone(), cx); + language_model::init(cx); + client::RefreshLlmTokenListener::register( + app_state.client.clone(), + app_state.user_store.clone(), + cx, + ); language_models::init(app_state.user_store.clone(), app_state.client.clone(), cx); web_search::init(cx); git_graph::init(cx); @@ -5333,6 +5511,11 @@ mod tests { let project = project1.clone(); |window, cx| MultiWorkspace::test_new(project, window, cx) }); + window + .update(cx, |multi_workspace, _, cx| { + multi_workspace.open_sidebar(cx); + }) + .unwrap(); cx.run_until_parked(); assert_eq!(cx.windows().len(), 1, 
"Should start with 1 window"); @@ -5355,7 +5538,7 @@ mod tests { let workspace1 = window .read_with(cx, |multi_workspace, _| { - multi_workspace.workspaces()[0].clone() + multi_workspace.workspaces().next().unwrap().clone() }) .unwrap(); @@ -5364,8 +5547,8 @@ mod tests { multi_workspace.activate(workspace2.clone(), window, cx); multi_workspace.activate(workspace3.clone(), window, cx); // Switch back to workspace1 for test setup - multi_workspace.activate(workspace1, window, cx); - assert_eq!(multi_workspace.active_workspace_index(), 0); + multi_workspace.activate(workspace1.clone(), window, cx); + assert_eq!(multi_workspace.workspace(), &workspace1); }) .unwrap(); @@ -5374,8 +5557,8 @@ mod tests { // Verify setup: 3 workspaces, workspace 0 active, still 1 window window .read_with(cx, |multi_workspace, _| { - assert_eq!(multi_workspace.workspaces().len(), 3); - assert_eq!(multi_workspace.active_workspace_index(), 0); + assert_eq!(multi_workspace.workspaces().count(), 3); + assert_eq!(multi_workspace.workspace(), &workspace1); }) .unwrap(); assert_eq!(cx.windows().len(), 1); @@ -5398,8 +5581,8 @@ mod tests { window .read_with(cx, |multi_workspace, cx| { assert_eq!( - multi_workspace.active_workspace_index(), - 2, + multi_workspace.workspace(), + &workspace3, "Should have switched to workspace 3 which contains /dir3" ); let active_item = multi_workspace @@ -5432,8 +5615,8 @@ mod tests { window .read_with(cx, |multi_workspace, cx| { assert_eq!( - multi_workspace.active_workspace_index(), - 1, + multi_workspace.workspace(), + &workspace2, "Should have switched to workspace 2 which contains /dir2" ); let active_item = multi_workspace @@ -5481,8 +5664,8 @@ mod tests { window .read_with(cx, |multi_workspace, cx| { assert_eq!( - multi_workspace.active_workspace_index(), - 0, + multi_workspace.workspace(), + &workspace1, "Should have switched back to workspace 0 which contains /dir1" ); let active_item = multi_workspace @@ -5532,6 +5715,11 @@ mod tests { let project = 
project1.clone(); |window, cx| MultiWorkspace::test_new(project, window, cx) }); + window1 + .update(cx, |multi_workspace, _, cx| { + multi_workspace.open_sidebar(cx); + }) + .unwrap(); cx.run_until_parked(); @@ -5558,6 +5746,11 @@ mod tests { let project = project3.clone(); |window, cx| MultiWorkspace::test_new(project, window, cx) }); + window2 + .update(cx, |multi_workspace, _, cx| { + multi_workspace.open_sidebar(cx); + }) + .unwrap(); cx.run_until_parked(); assert_eq!(cx.windows().len(), 2); @@ -5592,7 +5785,7 @@ mod tests { // Verify workspace1_1 is active window1 .read_with(cx, |multi_workspace, _| { - assert_eq!(multi_workspace.active_workspace_index(), 0); + assert_eq!(multi_workspace.workspace(), &workspace1_1); }) .unwrap(); @@ -5658,7 +5851,7 @@ mod tests { // Verify workspace1_1 is still active (not workspace1_2 with dirty item) window1 .read_with(cx, |multi_workspace, _| { - assert_eq!(multi_workspace.active_workspace_index(), 0); + assert_eq!(multi_workspace.workspace(), &workspace1_1); }) .unwrap(); @@ -5669,8 +5862,8 @@ mod tests { window1 .read_with(cx, |multi_workspace, _| { assert_eq!( - multi_workspace.active_workspace_index(), - 1, + multi_workspace.workspace(), + &workspace1_2, "Case 2: Non-active workspace should be activated when it has dirty item" ); }) @@ -5778,7 +5971,9 @@ mod tests { #[gpui::test] async fn test_multi_workspace_session_restore(cx: &mut TestAppContext) { use collections::HashMap; + use project::ProjectGroupKey; use session::Session; + use util::path_list::PathList; use workspace::{OpenMode, Workspace, WorkspaceId}; let app_state = init_test(cx); @@ -5821,6 +6016,12 @@ mod tests { .await .expect("failed to open first workspace"); + window_a + .update(cx, |multi_workspace, _, cx| { + multi_workspace.open_sidebar(cx); + }) + .unwrap(); + window_a .update(cx, |multi_workspace, window, cx| { multi_workspace.open_project(vec![dir2.into()], OpenMode::Activate, window, cx) @@ -5847,13 +6048,19 @@ mod tests { .await 
.expect("failed to open third workspace"); + window_b + .update(cx, |multi_workspace, _, cx| { + multi_workspace.open_sidebar(cx); + }) + .unwrap(); + // Currently dir2 is active because it was added last. // So, switch window_a's active workspace to dir1 (index 0). // This sets up a non-trivial assertion: after restore, dir1 should // still be active rather than whichever workspace happened to restore last. window_a .update(cx, |multi_workspace, window, cx| { - let workspace = multi_workspace.workspaces()[0].clone(); + let workspace = multi_workspace.workspaces().next().unwrap().clone(); multi_workspace.activate(workspace, window, cx); }) .unwrap(); @@ -5938,94 +6145,50 @@ mod tests { .filter_map(|window| window.downcast::()) .collect() }); + assert_eq!(restored_windows.len(), 2,); + + // Identify restored windows by their active workspace root paths. + let (restored_a, restored_b) = { + let (mut with_dir1, mut with_dir3) = (None, None); + for window in &restored_windows { + let active_paths = window + .read_with(cx, |mw, cx| mw.workspace().read(cx).root_paths(cx)) + .unwrap(); + if active_paths.iter().any(|p| p.as_ref() == Path::new(dir1)) { + with_dir1 = Some(window); + } else { + with_dir3 = Some(window); + } + } + ( + with_dir1.expect("expected a window with dir1 active"), + with_dir3.expect("expected a window with dir3 active"), + ) + }; - assert_eq!( - restored_windows.len(), - 2, - "expected 2 restored windows, got {}", - restored_windows.len() - ); - - let workspace_counts: Vec = restored_windows - .iter() - .map(|window| { - window - .read_with(cx, |multi_workspace, _| multi_workspace.workspaces().len()) - .unwrap() - }) - .collect(); - let mut sorted_counts = workspace_counts.clone(); - sorted_counts.sort(); - assert_eq!( - sorted_counts, - vec![1, 2], - "expected one window with 1 workspace and one with 2, got {workspace_counts:?}" - ); - - let dir1_path: Arc = Path::new(dir1).into(); - let dir2_path: Arc = Path::new(dir2).into(); - let dir3_path: Arc = 
Path::new(dir3).into(); - - let all_restored_paths: Vec>>> = restored_windows - .iter() - .map(|window| { - window - .read_with(cx, |multi_workspace, cx| { - multi_workspace - .workspaces() - .iter() - .map(|ws| ws.read(cx).root_paths(cx)) - .collect() - }) - .unwrap() + // Window A (dir1+dir2): 1 workspace restored, but 2 project group keys. + restored_a + .read_with(cx, |mw, _| { + assert_eq!( + mw.project_group_keys().cloned().collect::>(), + vec![ + ProjectGroupKey::new(None, PathList::new(&[dir1])), + ProjectGroupKey::new(None, PathList::new(&[dir2])), + ] + ); + assert_eq!(mw.workspaces().count(), 1); }) - .collect(); - - let two_ws_window = all_restored_paths - .iter() - .find(|paths| paths.len() == 2) - .expect("expected a window with 2 workspaces"); - assert!( - two_ws_window.iter().any(|p| p.contains(&dir1_path)), - "2-workspace window should contain dir1, got {two_ws_window:?}" - ); - assert!( - two_ws_window.iter().any(|p| p.contains(&dir2_path)), - "2-workspace window should contain dir2, got {two_ws_window:?}" - ); - - let one_ws_window = all_restored_paths - .iter() - .find(|paths| paths.len() == 1) - .expect("expected a window with 1 workspace"); - assert!( - one_ws_window[0].contains(&dir3_path), - "1-workspace window should contain dir3, got {one_ws_window:?}" - ); - - // --- Verify the active workspace is preserved --- - for window in &restored_windows { - let (active_paths, workspace_count) = window - .read_with(cx, |multi_workspace, cx| { - let active = multi_workspace.workspace(); - ( - active.read(cx).root_paths(cx), - multi_workspace.workspaces().len(), - ) - }) - .unwrap(); + .unwrap(); - if workspace_count == 2 { - assert!( - active_paths.contains(&dir1_path), - "2-workspace window should have dir1 active, got {active_paths:?}" - ); - } else { - assert!( - active_paths.contains(&dir3_path), - "1-workspace window should have dir3 active, got {active_paths:?}" + // Window B (dir3): 1 workspace, 1 project group key. 
+ restored_b + .read_with(cx, |mw, _| { + assert_eq!( + mw.project_group_keys().cloned().collect::>(), + vec![ProjectGroupKey::new(None, PathList::new(&[dir3]))] ); - } - } + assert_eq!(mw.workspaces().count(), 1); + }) + .unwrap(); } } diff --git a/crates/zed/src/zed/edit_prediction_registry.rs b/crates/zed/src/zed/edit_prediction_registry.rs index 8c9e74a42e6c3ddb2b340ac58da39752009825f0..d09dc07af839a681cea96d43217c4217927864d5 100644 --- a/crates/zed/src/zed/edit_prediction_registry.rs +++ b/crates/zed/src/zed/edit_prediction_registry.rs @@ -313,7 +313,12 @@ mod tests { let app_state = cx.update(|cx| { let app_state = AppState::test(cx); client::init(&app_state.client, cx); - language_model::init(app_state.user_store.clone(), app_state.client.clone(), cx); + language_model::init(cx); + client::RefreshLlmTokenListener::register( + app_state.client.clone(), + app_state.user_store.clone(), + cx, + ); editor::init(cx); app_state }); diff --git a/crates/zed/src/zed/open_listener.rs b/crates/zed/src/zed/open_listener.rs index 7645eae88d69f777f650ac9f86724bfef0f10bc5..0a302291cacc8caa9e0618da00b8d7c6370ccf0e 100644 --- a/crates/zed/src/zed/open_listener.rs +++ b/crates/zed/src/zed/open_listener.rs @@ -37,6 +37,7 @@ pub struct OpenRequest { pub open_paths: Vec, pub diff_paths: Vec<[String; 2]>, pub diff_all: bool, + pub dev_container: bool, pub open_channel_notes: Vec<(u64, Option)>, pub join_channel: Option, pub remote_connection: Option, @@ -78,6 +79,7 @@ impl OpenRequest { this.diff_paths = request.diff_paths; this.diff_all = request.diff_all; + this.dev_container = request.dev_container; if let Some(wsl) = request.wsl { let (user, distro_name) = if let Some((user, distro)) = wsl.split_once('@') { if user.is_empty() { @@ -256,6 +258,7 @@ pub struct RawOpenRequest { pub urls: Vec, pub diff_paths: Vec<[String; 2]>, pub diff_all: bool, + pub dev_container: bool, pub wsl: Option, } @@ -413,6 +416,7 @@ pub async fn handle_cli_connection( reuse, env, user_data_dir: _, + 
dev_container, } => { if !urls.is_empty() { cx.update(|cx| { @@ -421,6 +425,7 @@ pub async fn handle_cli_connection( urls, diff_paths, diff_all, + dev_container, wsl, }, cx, @@ -450,6 +455,7 @@ pub async fn handle_cli_connection( reuse, &responses, wait, + dev_container, app_state.clone(), env, cx, @@ -471,6 +477,7 @@ async fn open_workspaces( reuse: bool, responses: &IpcSender, wait: bool, + dev_container: bool, app_state: Arc, env: Option>, cx: &mut AsyncApp, @@ -532,6 +539,7 @@ async fn open_workspaces( requesting_window: replace_window, wait, env: env.clone(), + open_in_dev_container: dev_container, ..Default::default() }; @@ -1545,4 +1553,123 @@ mod tests { }) .unwrap(); } + + #[gpui::test] + async fn test_dev_container_flag_opens_modal(cx: &mut TestAppContext) { + let app_state = init_test(cx); + cx.update(|cx| recent_projects::init(cx)); + + app_state + .fs + .as_fake() + .insert_tree( + path!("/project"), + json!({ + ".devcontainer": { + "devcontainer.json": "{}" + }, + "src": { + "main.rs": "fn main() {}" + } + }), + ) + .await; + + let (response_tx, _) = ipc::channel::().unwrap(); + let errored = cx + .spawn({ + let app_state = app_state.clone(); + |mut cx| async move { + open_local_workspace( + vec![path!("/project").to_owned()], + vec![], + false, + workspace::OpenOptions { + open_in_dev_container: true, + ..Default::default() + }, + &response_tx, + &app_state, + &mut cx, + ) + .await + } + }) + .await; + + assert!(!errored); + + let multi_workspace = cx.update(|cx| cx.windows()[0].downcast::().unwrap()); + multi_workspace + .update(cx, |multi_workspace, _, cx| { + let flag = multi_workspace.workspace().read(cx).open_in_dev_container(); + assert!( + !flag, + "open_in_dev_container flag should be consumed by suggest_on_worktree_updated" + ); + }) + .unwrap(); + } + + #[gpui::test] + async fn test_dev_container_flag_cleared_without_config(cx: &mut TestAppContext) { + let app_state = init_test(cx); + cx.update(|cx| recent_projects::init(cx)); + + app_state 
+ .fs + .as_fake() + .insert_tree( + path!("/project"), + json!({ + "src": { + "main.rs": "fn main() {}" + } + }), + ) + .await; + + let (response_tx, _) = ipc::channel::().unwrap(); + let errored = cx + .spawn({ + let app_state = app_state.clone(); + |mut cx| async move { + open_local_workspace( + vec![path!("/project").to_owned()], + vec![], + false, + workspace::OpenOptions { + open_in_dev_container: true, + ..Default::default() + }, + &response_tx, + &app_state, + &mut cx, + ) + .await + } + }) + .await; + + assert!(!errored); + + // Let any pending worktree scan events and updates settle. + cx.run_until_parked(); + + // With no .devcontainer config, the flag should be cleared once the + // worktree scan completes, rather than persisting on the workspace. + let multi_workspace = cx.update(|cx| cx.windows()[0].downcast::().unwrap()); + multi_workspace + .update(cx, |multi_workspace, _, cx| { + let flag = multi_workspace + .workspace() + .read(cx) + .open_in_dev_container(); + assert!( + !flag, + "open_in_dev_container flag should be cleared when no devcontainer config exists" + ); + }) + .unwrap(); + } } diff --git a/crates/zed/src/zed/windows_only_instance.rs b/crates/zed/src/zed/windows_only_instance.rs index 5790715bc13bdcc68d180519d9176873bd81bc50..f22f49e26a982cb8cb68e21645033819e059de36 100644 --- a/crates/zed/src/zed/windows_only_instance.rs +++ b/crates/zed/src/zed/windows_only_instance.rs @@ -162,6 +162,7 @@ fn send_args_to_instance(args: &Args) -> anyhow::Result<()> { reuse: false, env: None, user_data_dir: args.user_data_dir.clone(), + dev_container: args.dev_container, } }; diff --git a/crates/zed_credentials_provider/Cargo.toml b/crates/zed_credentials_provider/Cargo.toml new file mode 100644 index 0000000000000000000000000000000000000000..9f64801d4664111bceb0fb7b9ee8c007977b6389 --- /dev/null +++ b/crates/zed_credentials_provider/Cargo.toml @@ -0,0 +1,22 @@ +[package] +name = "zed_credentials_provider" +version = "0.1.0" +edition.workspace = true 
+publish.workspace = true +license = "GPL-3.0-or-later" + +[lints] +workspace = true + +[lib] +path = "src/zed_credentials_provider.rs" + +[dependencies] +anyhow.workspace = true +credentials_provider.workspace = true +futures.workspace = true +gpui.workspace = true +paths.workspace = true +release_channel.workspace = true +serde.workspace = true +serde_json.workspace = true diff --git a/crates/zed_credentials_provider/LICENSE-GPL b/crates/zed_credentials_provider/LICENSE-GPL new file mode 120000 index 0000000000000000000000000000000000000000..89e542f750cd3860a0598eff0dc34b56d7336dc4 --- /dev/null +++ b/crates/zed_credentials_provider/LICENSE-GPL @@ -0,0 +1 @@ +../../LICENSE-GPL \ No newline at end of file diff --git a/crates/zed_credentials_provider/src/zed_credentials_provider.rs b/crates/zed_credentials_provider/src/zed_credentials_provider.rs new file mode 100644 index 0000000000000000000000000000000000000000..6705e58d400b1a66990f2451d318b5950ea08dde --- /dev/null +++ b/crates/zed_credentials_provider/src/zed_credentials_provider.rs @@ -0,0 +1,181 @@ +use std::collections::HashMap; +use std::future::Future; +use std::path::PathBuf; +use std::pin::Pin; +use std::sync::{Arc, LazyLock}; + +use anyhow::Result; +use credentials_provider::CredentialsProvider; +use futures::FutureExt as _; +use gpui::{App, AsyncApp, Global}; +use release_channel::ReleaseChannel; + +/// An environment variable whose presence indicates that the system keychain +/// should be used in development. +/// +/// By default, running Zed in development uses the development credentials +/// provider. Setting this environment variable allows you to interact with the +/// system keychain (for instance, if you need to test something). +/// +/// Only works in development. Setting this environment variable in other +/// release channels is a no-op. 
+static ZED_DEVELOPMENT_USE_KEYCHAIN: LazyLock = LazyLock::new(|| { + std::env::var("ZED_DEVELOPMENT_USE_KEYCHAIN").is_ok_and(|value| !value.is_empty()) +}); + +pub struct ZedCredentialsProvider(pub Arc); + +impl Global for ZedCredentialsProvider {} + +/// Returns the global [`CredentialsProvider`]. +pub fn init_global(cx: &mut App) { + // The `CredentialsProvider` trait has `Send + Sync` bounds on it, so it + // seems like this is a false positive from Clippy. + #[allow(clippy::arc_with_non_send_sync)] + let provider = new(cx); + cx.set_global(ZedCredentialsProvider(provider)); +} + +pub fn global(cx: &App) -> Arc { + cx.try_global::() + .map(|provider| provider.0.clone()) + .unwrap_or_else(|| new(cx)) +} + +fn new(cx: &App) -> Arc { + let use_development_provider = match ReleaseChannel::try_global(cx) { + Some(ReleaseChannel::Dev) => { + // In development we default to using the development + // credentials provider to avoid getting spammed by relentless + // keychain access prompts. + // + // However, if the `ZED_DEVELOPMENT_USE_KEYCHAIN` environment + // variable is set, we will use the actual keychain. + !*ZED_DEVELOPMENT_USE_KEYCHAIN + } + Some(ReleaseChannel::Nightly | ReleaseChannel::Preview | ReleaseChannel::Stable) | None => { + false + } + }; + + if use_development_provider { + Arc::new(DevelopmentCredentialsProvider::new()) + } else { + Arc::new(KeychainCredentialsProvider) + } +} + +/// A credentials provider that stores credentials in the system keychain. 
+struct KeychainCredentialsProvider; + +impl CredentialsProvider for KeychainCredentialsProvider { + fn read_credentials<'a>( + &'a self, + url: &'a str, + cx: &'a AsyncApp, + ) -> Pin)>>> + 'a>> { + async move { cx.update(|cx| cx.read_credentials(url)).await }.boxed_local() + } + + fn write_credentials<'a>( + &'a self, + url: &'a str, + username: &'a str, + password: &'a [u8], + cx: &'a AsyncApp, + ) -> Pin> + 'a>> { + async move { + cx.update(move |cx| cx.write_credentials(url, username, password)) + .await + } + .boxed_local() + } + + fn delete_credentials<'a>( + &'a self, + url: &'a str, + cx: &'a AsyncApp, + ) -> Pin> + 'a>> { + async move { cx.update(move |cx| cx.delete_credentials(url)).await }.boxed_local() + } +} + +/// A credentials provider that stores credentials in a local file. +/// +/// This MUST only be used in development, as this is not a secure way of storing +/// credentials on user machines. +/// +/// Its existence is purely to work around the annoyance of having to constantly +/// re-allow access to the system keychain when developing Zed. 
+struct DevelopmentCredentialsProvider { + path: PathBuf, +} + +impl DevelopmentCredentialsProvider { + fn new() -> Self { + let path = paths::config_dir().join("development_credentials"); + + Self { path } + } + + fn load_credentials(&self) -> Result)>> { + let json = std::fs::read(&self.path)?; + let credentials: HashMap)> = serde_json::from_slice(&json)?; + + Ok(credentials) + } + + fn save_credentials(&self, credentials: &HashMap)>) -> Result<()> { + let json = serde_json::to_string(credentials)?; + std::fs::write(&self.path, json)?; + + Ok(()) + } +} + +impl CredentialsProvider for DevelopmentCredentialsProvider { + fn read_credentials<'a>( + &'a self, + url: &'a str, + _cx: &'a AsyncApp, + ) -> Pin)>>> + 'a>> { + async move { + Ok(self + .load_credentials() + .unwrap_or_default() + .get(url) + .cloned()) + } + .boxed_local() + } + + fn write_credentials<'a>( + &'a self, + url: &'a str, + username: &'a str, + password: &'a [u8], + _cx: &'a AsyncApp, + ) -> Pin> + 'a>> { + async move { + let mut credentials = self.load_credentials().unwrap_or_default(); + credentials.insert(url.to_string(), (username.to_string(), password.to_vec())); + + self.save_credentials(&credentials) + } + .boxed_local() + } + + fn delete_credentials<'a>( + &'a self, + url: &'a str, + _cx: &'a AsyncApp, + ) -> Pin> + 'a>> { + async move { + let mut credentials = self.load_credentials()?; + credentials.remove(url); + + self.save_credentials(&credentials) + } + .boxed_local() + } +} diff --git a/crates/zed_env_vars/Cargo.toml b/crates/zed_env_vars/Cargo.toml index 1cf32174c351c28ec7eb16deab7b7986655d4a48..bf863b742568f3f607ba7cb54bc8fc267f045cc9 100644 --- a/crates/zed_env_vars/Cargo.toml +++ b/crates/zed_env_vars/Cargo.toml @@ -15,4 +15,4 @@ path = "src/zed_env_vars.rs" default = [] [dependencies] -gpui.workspace = true +env_var.workspace = true diff --git a/crates/zed_env_vars/src/zed_env_vars.rs b/crates/zed_env_vars/src/zed_env_vars.rs index 
e601cc9536602ac943bd76bf1bfd8b8ac8979dd9..13451911295735762074bcb1cf152470afa55c36 100644 --- a/crates/zed_env_vars/src/zed_env_vars.rs +++ b/crates/zed_env_vars/src/zed_env_vars.rs @@ -1,45 +1,6 @@ -use gpui::SharedString; +pub use env_var::{EnvVar, bool_env_var, env_var}; use std::sync::LazyLock; /// Whether Zed is running in stateless mode. /// When true, Zed will use in-memory databases instead of persistent storage. pub static ZED_STATELESS: LazyLock = bool_env_var!("ZED_STATELESS"); - -#[derive(Clone)] -pub struct EnvVar { - pub name: SharedString, - /// Value of the environment variable. Also `None` when set to an empty string. - pub value: Option, -} - -impl EnvVar { - pub fn new(name: SharedString) -> Self { - let value = std::env::var(name.as_str()).ok(); - if value.as_ref().is_some_and(|v| v.is_empty()) { - Self { name, value: None } - } else { - Self { name, value } - } - } - - pub fn or(self, other: EnvVar) -> EnvVar { - if self.value.is_some() { self } else { other } - } -} - -/// Creates a `LazyLock` expression for use in a `static` declaration. -#[macro_export] -macro_rules! env_var { - ($name:expr) => { - ::std::sync::LazyLock::new(|| $crate::EnvVar::new(($name).into())) - }; -} - -/// Generates a `LazyLock` expression for use in a `static` declaration. Checks if the -/// environment variable exists and is non-empty. -#[macro_export] -macro_rules! 
bool_env_var { - ($name:expr) => { - ::std::sync::LazyLock::new(|| $crate::EnvVar::new(($name).into()).value.is_some()) - }; -} diff --git a/crates/zeta_prompt/Cargo.toml b/crates/zeta_prompt/Cargo.toml index 21634583d33e13cd9570041f3e8466d05cef9944..8acd91a7a43613fd63f4f46ab73e9485fd64e7d2 100644 --- a/crates/zeta_prompt/Cargo.toml +++ b/crates/zeta_prompt/Cargo.toml @@ -13,6 +13,7 @@ path = "src/zeta_prompt.rs" [dependencies] anyhow.workspace = true +imara-diff.workspace = true serde.workspace = true strum.workspace = true diff --git a/crates/zeta_prompt/src/udiff.rs b/crates/zeta_prompt/src/udiff.rs new file mode 100644 index 0000000000000000000000000000000000000000..ab0837b9f54ac0bf9ef74038f0c876b751f70200 --- /dev/null +++ b/crates/zeta_prompt/src/udiff.rs @@ -0,0 +1,1406 @@ +use std::{ + borrow::Cow, + fmt::{Display, Write}, + mem, + ops::Range, +}; + +use anyhow::{Context as _, Result, anyhow}; +use imara_diff::{ + Algorithm, Sink, diff, + intern::{InternedInput, Interner, Token}, +}; + +pub fn strip_diff_path_prefix<'a>(diff: &'a str, prefix: &str) -> Cow<'a, str> { + if prefix.is_empty() { + return Cow::Borrowed(diff); + } + + let prefix_with_slash = format!("{}/", prefix); + let mut needs_rewrite = false; + + for line in diff.lines() { + match DiffLine::parse(line) { + DiffLine::OldPath { path } | DiffLine::NewPath { path } => { + if path.starts_with(&prefix_with_slash) { + needs_rewrite = true; + break; + } + } + _ => {} + } + } + + if !needs_rewrite { + return Cow::Borrowed(diff); + } + + let mut result = String::with_capacity(diff.len()); + for line in diff.lines() { + match DiffLine::parse(line) { + DiffLine::OldPath { path } => { + let stripped = path + .strip_prefix(&prefix_with_slash) + .unwrap_or(path.as_ref()); + result.push_str(&format!("--- a/{}\n", stripped)); + } + DiffLine::NewPath { path } => { + let stripped = path + .strip_prefix(&prefix_with_slash) + .unwrap_or(path.as_ref()); + result.push_str(&format!("+++ b/{}\n", stripped)); + } + _ 
=> { + result.push_str(line); + result.push('\n'); + } + } + } + + Cow::Owned(result) +} + +/// Strip unnecessary git metadata lines from a diff, keeping only the lines +/// needed for patch application: path headers (--- and +++), hunk headers (@@), +/// and content lines (+, -, space). +pub fn strip_diff_metadata(diff: &str) -> String { + let mut result = String::new(); + + for line in diff.lines() { + let dominated = DiffLine::parse(line); + match dominated { + // Keep path headers, hunk headers, and content lines + DiffLine::OldPath { .. } + | DiffLine::NewPath { .. } + | DiffLine::HunkHeader(_) + | DiffLine::Context(_) + | DiffLine::Deletion(_) + | DiffLine::Addition(_) + | DiffLine::NoNewlineAtEOF => { + result.push_str(line); + result.push('\n'); + } + // Skip garbage lines (diff --git, index, etc.) + DiffLine::Garbage(_) => {} + } + } + + result +} + +/// Marker used to encode cursor position in patch comment lines. +pub const CURSOR_POSITION_MARKER: &str = "[CURSOR_POSITION]"; + +/// Extract cursor offset from a patch and return `(clean_patch, cursor_offset)`. +/// +/// Cursor position is encoded as a comment line (starting with `#`) containing +/// `[CURSOR_POSITION]`. A `^` in the line indicates the cursor column; a `<` +/// indicates column 0. The offset is computed relative to addition (`+`) and +/// context (` `) lines accumulated so far in the hunk, which represent the +/// cursor position within the new text contributed by the hunk. 
+pub fn extract_cursor_from_patch(patch: &str) -> (String, Option) { + let mut clean_patch = String::new(); + let mut cursor_offset: Option = None; + let mut line_start_offset = 0usize; + let mut prev_line_start_offset = 0usize; + + for line in patch.lines() { + let diff_line = DiffLine::parse(line); + + match &diff_line { + DiffLine::Garbage(content) + if content.starts_with('#') && content.contains(CURSOR_POSITION_MARKER) => + { + let caret_column = if let Some(caret_pos) = content.find('^') { + caret_pos + } else if content.find('<').is_some() { + 0 + } else { + continue; + }; + let cursor_column = caret_column.saturating_sub('#'.len_utf8()); + cursor_offset = Some(prev_line_start_offset + cursor_column); + } + _ => { + if !clean_patch.is_empty() { + clean_patch.push('\n'); + } + clean_patch.push_str(line); + + match diff_line { + DiffLine::Addition(content) | DiffLine::Context(content) => { + prev_line_start_offset = line_start_offset; + line_start_offset += content.len() + 1; + } + _ => {} + } + } + } + } + + if patch.ends_with('\n') && !clean_patch.is_empty() { + clean_patch.push('\n'); + } + + (clean_patch, cursor_offset) +} + +/// Find all byte offsets where `hunk.context` occurs as a substring of `text`. +/// +/// If no exact matches are found and the context ends with `'\n'` but `text` +/// does not, retries without the trailing newline, accepting only a match at +/// the very end of `text`. When this fallback fires, the hunk's context is +/// trimmed and its edit ranges are clamped so that downstream code doesn't +/// index past the end of the matched region. This handles diffs that are +/// missing a `\ No newline at end of file` marker: the parser always appends +/// `'\n'` via `writeln!`, so the context can have a trailing newline that +/// doesn't exist in the source text. 
+pub fn find_context_candidates(text: &str, hunk: &mut Hunk) -> Vec { + let candidates: Vec = text + .match_indices(&hunk.context) + .map(|(offset, _)| offset) + .collect(); + + if !candidates.is_empty() { + return candidates; + } + + if hunk.context.ends_with('\n') && !hunk.context.is_empty() { + let old_len = hunk.context.len(); + hunk.context.pop(); + let new_len = hunk.context.len(); + + if !hunk.context.is_empty() { + let candidates: Vec = text + .match_indices(&hunk.context) + .filter(|(offset, _)| offset + new_len == text.len()) + .map(|(offset, _)| offset) + .collect(); + + if !candidates.is_empty() { + for edit in &mut hunk.edits { + let touched_phantom = edit.range.end > new_len; + edit.range.start = edit.range.start.min(new_len); + edit.range.end = edit.range.end.min(new_len); + if touched_phantom { + // The replacement text was also written with a + // trailing '\n' that corresponds to the phantom + // newline we just removed from the context. + if edit.text.ends_with('\n') { + edit.text.pop(); + } + } + } + return candidates; + } + + // Restore if fallback didn't help either. + hunk.context.push('\n'); + debug_assert_eq!(hunk.context.len(), old_len); + } else { + hunk.context.push('\n'); + } + } + + Vec::new() +} + +/// Given multiple candidate offsets where context matches, use line numbers to disambiguate. +/// Returns the offset that matches the expected line, or None if no match or no line number available. 
+pub fn disambiguate_by_line_number( + candidates: &[usize], + expected_line: Option, + offset_to_line: &dyn Fn(usize) -> u32, +) -> Option { + match candidates.len() { + 0 => None, + 1 => Some(candidates[0]), + _ => { + let expected = expected_line?; + candidates + .iter() + .copied() + .find(|&offset| offset_to_line(offset) == expected) + } + } +} + +pub fn unified_diff_with_context( + old_text: &str, + new_text: &str, + old_start_line: u32, + new_start_line: u32, + context_lines: u32, +) -> String { + let input = InternedInput::new(old_text, new_text); + diff( + Algorithm::Histogram, + &input, + OffsetUnifiedDiffBuilder::new(&input, old_start_line, new_start_line, context_lines), + ) +} + +struct OffsetUnifiedDiffBuilder<'a> { + before: &'a [Token], + after: &'a [Token], + interner: &'a Interner<&'a str>, + pos: u32, + before_hunk_start: u32, + after_hunk_start: u32, + before_hunk_len: u32, + after_hunk_len: u32, + old_line_offset: u32, + new_line_offset: u32, + context_lines: u32, + buffer: String, + dst: String, +} + +impl<'a> OffsetUnifiedDiffBuilder<'a> { + fn new( + input: &'a InternedInput<&'a str>, + old_line_offset: u32, + new_line_offset: u32, + context_lines: u32, + ) -> Self { + Self { + before_hunk_start: 0, + after_hunk_start: 0, + before_hunk_len: 0, + after_hunk_len: 0, + old_line_offset, + new_line_offset, + context_lines, + buffer: String::with_capacity(8), + dst: String::new(), + interner: &input.interner, + before: &input.before, + after: &input.after, + pos: 0, + } + } + + fn print_tokens(&mut self, tokens: &[Token], prefix: char) { + for &token in tokens { + writeln!(&mut self.buffer, "{prefix}{}", self.interner[token]).unwrap(); + } + } + + fn flush(&mut self) { + if self.before_hunk_len == 0 && self.after_hunk_len == 0 { + return; + } + + let end = (self.pos + self.context_lines).min(self.before.len() as u32); + self.update_pos(end, end); + + writeln!( + &mut self.dst, + "@@ -{},{} +{},{} @@", + self.before_hunk_start + 1 + 
self.old_line_offset, + self.before_hunk_len, + self.after_hunk_start + 1 + self.new_line_offset, + self.after_hunk_len, + ) + .unwrap(); + write!(&mut self.dst, "{}", &self.buffer).unwrap(); + self.buffer.clear(); + self.before_hunk_len = 0; + self.after_hunk_len = 0; + } + + fn update_pos(&mut self, print_to: u32, move_to: u32) { + self.print_tokens(&self.before[self.pos as usize..print_to as usize], ' '); + let len = print_to - self.pos; + self.before_hunk_len += len; + self.after_hunk_len += len; + self.pos = move_to; + } +} + +impl Sink for OffsetUnifiedDiffBuilder<'_> { + type Out = String; + + fn process_change(&mut self, before: Range, after: Range) { + if before.start - self.pos > self.context_lines * 2 { + self.flush(); + } + if self.before_hunk_len == 0 && self.after_hunk_len == 0 { + self.pos = before.start.saturating_sub(self.context_lines); + self.before_hunk_start = self.pos; + self.after_hunk_start = after.start.saturating_sub(self.context_lines); + } + + self.update_pos(before.start, before.end); + self.before_hunk_len += before.end - before.start; + self.after_hunk_len += after.end - after.start; + self.print_tokens( + &self.before[before.start as usize..before.end as usize], + '-', + ); + self.print_tokens(&self.after[after.start as usize..after.end as usize], '+'); + } + + fn finish(mut self) -> Self::Out { + self.flush(); + self.dst + } +} + +pub fn encode_cursor_in_patch(patch: &str, cursor_offset: Option) -> String { + let Some(cursor_offset) = cursor_offset else { + return patch.to_string(); + }; + + let mut result = String::new(); + let mut line_start_offset = 0usize; + + for line in patch.lines() { + if matches!( + DiffLine::parse(line), + DiffLine::Garbage(content) + if content.starts_with('#') && content.contains(CURSOR_POSITION_MARKER) + ) { + continue; + } + + if !result.is_empty() { + result.push('\n'); + } + result.push_str(line); + + match DiffLine::parse(line) { + DiffLine::Addition(content) => { + let line_end_offset = 
line_start_offset + content.len(); + + if cursor_offset >= line_start_offset && cursor_offset <= line_end_offset { + let cursor_column = cursor_offset - line_start_offset; + + result.push('\n'); + result.push('#'); + for _ in 0..cursor_column { + result.push(' '); + } + write!(result, "^{}", CURSOR_POSITION_MARKER).unwrap(); + } + + line_start_offset = line_end_offset + 1; + } + DiffLine::Context(content) => { + line_start_offset += content.len() + 1; + } + _ => {} + } + } + + if patch.ends_with('\n') { + result.push('\n'); + } + + result +} + +pub fn apply_diff_to_string(diff_str: &str, text: &str) -> Result { + apply_diff_to_string_with_hunk_offset(diff_str, text).map(|(text, _)| text) +} + +/// Applies a diff to a string and returns the result along with the offset where +/// the first hunk's context matched in the original text. This offset can be used +/// to adjust cursor positions that are relative to the hunk's content. +pub fn apply_diff_to_string_with_hunk_offset( + diff_str: &str, + text: &str, +) -> Result<(String, Option)> { + let mut diff = DiffParser::new(diff_str); + + let mut text = text.to_string(); + let mut first_hunk_offset = None; + + while let Some(event) = diff.next().context("Failed to parse diff")? { + match event { + DiffEvent::Hunk { + mut hunk, + path: _, + status: _, + } => { + let candidates = find_context_candidates(&text, &mut hunk); + + let hunk_offset = + disambiguate_by_line_number(&candidates, hunk.start_line, &|offset| { + text[..offset].matches('\n').count() as u32 + }) + .ok_or_else(|| anyhow!("couldn't resolve hunk"))?; + + if first_hunk_offset.is_none() { + first_hunk_offset = Some(hunk_offset); + } + + for edit in hunk.edits.iter().rev() { + let range = (hunk_offset + edit.range.start)..(hunk_offset + edit.range.end); + text.replace_range(range, &edit.text); + } + } + DiffEvent::FileEnd { .. 
} => {} + } + } + + Ok((text, first_hunk_offset)) +} + +struct PatchFile<'a> { + old_path: Cow<'a, str>, + new_path: Cow<'a, str>, +} + +pub struct DiffParser<'a> { + current_file: Option>, + current_line: Option<(&'a str, DiffLine<'a>)>, + hunk: Hunk, + diff: std::str::Lines<'a>, + pending_start_line: Option, + processed_no_newline: bool, + last_diff_op: LastDiffOp, +} + +#[derive(Clone, Copy, Default)] +enum LastDiffOp { + #[default] + None, + Context, + Deletion, + Addition, +} + +#[derive(Debug, PartialEq)] +pub enum DiffEvent<'a> { + Hunk { + path: Cow<'a, str>, + hunk: Hunk, + status: FileStatus, + }, + FileEnd { + renamed_to: Option>, + }, +} + +#[derive(Debug, Clone, Copy, PartialEq)] +pub enum FileStatus { + Created, + Modified, + Deleted, +} + +#[derive(Debug, Default, PartialEq)] +pub struct Hunk { + pub context: String, + pub edits: Vec, + pub start_line: Option, +} + +impl Hunk { + pub fn is_empty(&self) -> bool { + self.context.is_empty() && self.edits.is_empty() + } +} + +#[derive(Debug, PartialEq)] +pub struct Edit { + pub range: Range, + pub text: String, +} + +impl<'a> DiffParser<'a> { + pub fn new(diff: &'a str) -> Self { + let mut diff = diff.lines(); + let current_line = diff.next().map(|line| (line, DiffLine::parse(line))); + DiffParser { + current_file: None, + hunk: Hunk::default(), + current_line, + diff, + pending_start_line: None, + processed_no_newline: false, + last_diff_op: LastDiffOp::None, + } + } + + pub fn next(&mut self) -> Result>> { + loop { + let (hunk_done, file_done) = match self.current_line.as_ref().map(|e| &e.1) { + Some(DiffLine::OldPath { .. 
}) | Some(DiffLine::Garbage(_)) | None => (true, true), + Some(DiffLine::HunkHeader(_)) => (true, false), + _ => (false, false), + }; + + if hunk_done { + if let Some(file) = &self.current_file + && !self.hunk.is_empty() + { + let status = if file.old_path == "/dev/null" { + FileStatus::Created + } else if file.new_path == "/dev/null" { + FileStatus::Deleted + } else { + FileStatus::Modified + }; + let path = if status == FileStatus::Created { + file.new_path.clone() + } else { + file.old_path.clone() + }; + let mut hunk = mem::take(&mut self.hunk); + hunk.start_line = self.pending_start_line.take(); + self.processed_no_newline = false; + self.last_diff_op = LastDiffOp::None; + return Ok(Some(DiffEvent::Hunk { path, hunk, status })); + } + } + + if file_done { + if let Some(PatchFile { old_path, new_path }) = self.current_file.take() { + return Ok(Some(DiffEvent::FileEnd { + renamed_to: if old_path != new_path && old_path != "/dev/null" { + Some(new_path) + } else { + None + }, + })); + } + } + + let Some((line, parsed_line)) = self.current_line.take() else { + break; + }; + + (|| { + match parsed_line { + DiffLine::OldPath { path } => { + self.current_file = Some(PatchFile { + old_path: path, + new_path: "".into(), + }); + } + DiffLine::NewPath { path } => { + if let Some(current_file) = &mut self.current_file { + current_file.new_path = path + } + } + DiffLine::HunkHeader(location) => { + if let Some(loc) = location { + self.pending_start_line = Some(loc.start_line_old); + } + } + DiffLine::Context(ctx) => { + if self.current_file.is_some() { + writeln!(&mut self.hunk.context, "{ctx}")?; + self.last_diff_op = LastDiffOp::Context; + } + } + DiffLine::Deletion(del) => { + if self.current_file.is_some() { + let range = self.hunk.context.len() + ..self.hunk.context.len() + del.len() + '\n'.len_utf8(); + if let Some(last_edit) = self.hunk.edits.last_mut() + && last_edit.range.end == range.start + { + last_edit.range.end = range.end; + } else { + 
self.hunk.edits.push(Edit { + range, + text: String::new(), + }); + } + writeln!(&mut self.hunk.context, "{del}")?; + self.last_diff_op = LastDiffOp::Deletion; + } + } + DiffLine::Addition(add) => { + if self.current_file.is_some() { + let range = self.hunk.context.len()..self.hunk.context.len(); + if let Some(last_edit) = self.hunk.edits.last_mut() + && last_edit.range.end == range.start + { + writeln!(&mut last_edit.text, "{add}").unwrap(); + } else { + self.hunk.edits.push(Edit { + range, + text: format!("{add}\n"), + }); + } + self.last_diff_op = LastDiffOp::Addition; + } + } + DiffLine::NoNewlineAtEOF => { + if !self.processed_no_newline { + self.processed_no_newline = true; + match self.last_diff_op { + LastDiffOp::Addition => { + // Remove trailing newline from the last addition + if let Some(last_edit) = self.hunk.edits.last_mut() { + last_edit.text.pop(); + } + } + LastDiffOp::Deletion => { + // Remove trailing newline from context (which includes the deletion) + self.hunk.context.pop(); + if let Some(last_edit) = self.hunk.edits.last_mut() { + last_edit.range.end -= 1; + } + } + LastDiffOp::Context | LastDiffOp::None => { + // Remove trailing newline from context + self.hunk.context.pop(); + } + } + } + } + DiffLine::Garbage(_) => {} + } + + anyhow::Ok(()) + })() + .with_context(|| format!("on line:\n\n```\n{}```", line))?; + + self.current_line = self.diff.next().map(|line| (line, DiffLine::parse(line))); + } + + anyhow::Ok(None) + } +} + +#[derive(Debug, PartialEq)] +pub enum DiffLine<'a> { + OldPath { path: Cow<'a, str> }, + NewPath { path: Cow<'a, str> }, + HunkHeader(Option), + Context(&'a str), + Deletion(&'a str), + Addition(&'a str), + NoNewlineAtEOF, + Garbage(&'a str), +} + +#[derive(Debug, PartialEq)] +pub struct HunkLocation { + pub start_line_old: u32, + pub count_old: u32, + pub start_line_new: u32, + pub count_new: u32, +} + +impl<'a> DiffLine<'a> { + pub fn parse(line: &'a str) -> Self { + 
Self::try_parse(line).unwrap_or(Self::Garbage(line)) + } + + fn try_parse(line: &'a str) -> Option { + if line.starts_with("\\ No newline") { + return Some(Self::NoNewlineAtEOF); + } + if let Some(header) = line.strip_prefix("---").and_then(eat_required_whitespace) { + let path = parse_header_path("a/", header); + Some(Self::OldPath { path }) + } else if let Some(header) = line.strip_prefix("+++").and_then(eat_required_whitespace) { + Some(Self::NewPath { + path: parse_header_path("b/", header), + }) + } else if let Some(header) = line.strip_prefix("@@").and_then(eat_required_whitespace) { + if header.starts_with("...") { + return Some(Self::HunkHeader(None)); + } + + let mut tokens = header.split_whitespace(); + let old_range = tokens.next()?.strip_prefix('-')?; + let new_range = tokens.next()?.strip_prefix('+')?; + + let (start_line_old, count_old) = old_range.split_once(',').unwrap_or((old_range, "1")); + let (start_line_new, count_new) = new_range.split_once(',').unwrap_or((new_range, "1")); + + Some(Self::HunkHeader(Some(HunkLocation { + start_line_old: start_line_old.parse::().ok()?.saturating_sub(1), + count_old: count_old.parse().ok()?, + start_line_new: start_line_new.parse::().ok()?.saturating_sub(1), + count_new: count_new.parse().ok()?, + }))) + } else if let Some(deleted_header) = line.strip_prefix("-") { + Some(Self::Deletion(deleted_header)) + } else if line.is_empty() { + Some(Self::Context("")) + } else if let Some(context) = line.strip_prefix(" ") { + Some(Self::Context(context)) + } else { + Some(Self::Addition(line.strip_prefix("+")?)) + } + } +} + +impl<'a> Display for DiffLine<'a> { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + match self { + DiffLine::OldPath { path } => write!(f, "--- {path}"), + DiffLine::NewPath { path } => write!(f, "+++ {path}"), + DiffLine::HunkHeader(Some(hunk_location)) => { + write!( + f, + "@@ -{},{} +{},{} @@", + hunk_location.start_line_old + 1, + hunk_location.count_old, + 
hunk_location.start_line_new + 1, + hunk_location.count_new + ) + } + DiffLine::HunkHeader(None) => write!(f, "@@ ... @@"), + DiffLine::Context(content) => write!(f, " {content}"), + DiffLine::Deletion(content) => write!(f, "-{content}"), + DiffLine::Addition(content) => write!(f, "+{content}"), + DiffLine::NoNewlineAtEOF => write!(f, "\\ No newline at end of file"), + DiffLine::Garbage(line) => write!(f, "{line}"), + } + } +} + +fn parse_header_path<'a>(strip_prefix: &'static str, header: &'a str) -> Cow<'a, str> { + if !header.contains(['"', '\\']) { + let path = header.split_ascii_whitespace().next().unwrap_or(header); + return Cow::Borrowed(path.strip_prefix(strip_prefix).unwrap_or(path)); + } + + let mut path = String::with_capacity(header.len()); + let mut in_quote = false; + let mut chars = header.chars().peekable(); + let mut strip_prefix = Some(strip_prefix); + + while let Some(char) = chars.next() { + if char == '"' { + in_quote = !in_quote; + } else if char == '\\' { + let Some(&next_char) = chars.peek() else { + break; + }; + chars.next(); + path.push(next_char); + } else if char.is_ascii_whitespace() && !in_quote { + break; + } else { + path.push(char); + } + + if let Some(prefix) = strip_prefix + && path == prefix + { + strip_prefix.take(); + path.clear(); + } + } + + Cow::Owned(path) +} + +fn eat_required_whitespace(header: &str) -> Option<&str> { + let trimmed = header.trim_ascii_start(); + + if trimmed.len() == header.len() { + None + } else { + Some(trimmed) + } +} + +#[cfg(test)] +mod tests { + use super::*; + use indoc::indoc; + + #[test] + fn parse_lines_simple() { + let input = indoc! 
{" + diff --git a/text.txt b/text.txt + index 86c770d..a1fd855 100644 + --- a/file.txt + +++ b/file.txt + @@ -1,2 +1,3 @@ + context + -deleted + +inserted + garbage + + --- b/file.txt + +++ a/file.txt + "}; + + let lines = input.lines().map(DiffLine::parse).collect::>(); + + assert_eq!( + lines, + &[ + DiffLine::Garbage("diff --git a/text.txt b/text.txt"), + DiffLine::Garbage("index 86c770d..a1fd855 100644"), + DiffLine::OldPath { + path: "file.txt".into() + }, + DiffLine::NewPath { + path: "file.txt".into() + }, + DiffLine::HunkHeader(Some(HunkLocation { + start_line_old: 0, + count_old: 2, + start_line_new: 0, + count_new: 3 + })), + DiffLine::Context("context"), + DiffLine::Deletion("deleted"), + DiffLine::Addition("inserted"), + DiffLine::Garbage("garbage"), + DiffLine::Context(""), + DiffLine::OldPath { + path: "b/file.txt".into() + }, + DiffLine::NewPath { + path: "a/file.txt".into() + }, + ] + ); + } + + #[test] + fn file_header_extra_space() { + let options = ["--- file", "--- file", "---\tfile"]; + + for option in options { + assert_eq!( + DiffLine::parse(option), + DiffLine::OldPath { + path: "file".into() + }, + "{option}", + ); + } + } + + #[test] + fn hunk_header_extra_space() { + let options = [ + "@@ -1,2 +1,3 @@", + "@@ -1,2 +1,3 @@", + "@@\t-1,2\t+1,3\t@@", + "@@ -1,2 +1,3 @@", + "@@ -1,2 +1,3 @@", + "@@ -1,2 +1,3 @@", + "@@ -1,2 +1,3 @@ garbage", + ]; + + for option in options { + assert_eq!( + DiffLine::parse(option), + DiffLine::HunkHeader(Some(HunkLocation { + start_line_old: 0, + count_old: 2, + start_line_new: 0, + count_new: 3 + })), + "{option}", + ); + } + } + + #[test] + fn hunk_header_without_location() { + assert_eq!(DiffLine::parse("@@ ... 
@@"), DiffLine::HunkHeader(None)); + } + + #[test] + fn test_parse_path() { + assert_eq!(parse_header_path("a/", "foo.txt"), "foo.txt"); + assert_eq!( + parse_header_path("a/", "foo/bar/baz.txt"), + "foo/bar/baz.txt" + ); + assert_eq!(parse_header_path("a/", "a/foo.txt"), "foo.txt"); + assert_eq!( + parse_header_path("a/", "a/foo/bar/baz.txt"), + "foo/bar/baz.txt" + ); + + // Extra + assert_eq!( + parse_header_path("a/", "a/foo/bar/baz.txt 2025"), + "foo/bar/baz.txt" + ); + assert_eq!( + parse_header_path("a/", "a/foo/bar/baz.txt\t2025"), + "foo/bar/baz.txt" + ); + assert_eq!( + parse_header_path("a/", "a/foo/bar/baz.txt \""), + "foo/bar/baz.txt" + ); + + // Quoted + assert_eq!( + parse_header_path("a/", "a/foo/bar/\"baz quox.txt\""), + "foo/bar/baz quox.txt" + ); + assert_eq!( + parse_header_path("a/", "\"a/foo/bar/baz quox.txt\""), + "foo/bar/baz quox.txt" + ); + assert_eq!( + parse_header_path("a/", "\"foo/bar/baz quox.txt\""), + "foo/bar/baz quox.txt" + ); + assert_eq!(parse_header_path("a/", "\"whatever 🤷\""), "whatever 🤷"); + assert_eq!( + parse_header_path("a/", "\"foo/bar/baz quox.txt\" 2025"), + "foo/bar/baz quox.txt" + ); + // unescaped quotes are dropped + assert_eq!(parse_header_path("a/", "foo/\"bar\""), "foo/bar"); + + // Escaped + assert_eq!( + parse_header_path("a/", "\"foo/\\\"bar\\\"/baz.txt\""), + "foo/\"bar\"/baz.txt" + ); + assert_eq!( + parse_header_path("a/", "\"C:\\\\Projects\\\\My App\\\\old file.txt\""), + "C:\\Projects\\My App\\old file.txt" + ); + } + + #[test] + fn test_parse_diff_with_leading_and_trailing_garbage() { + let diff = indoc! {" + I need to make some changes. + + I'll change the following things: + - one + - two + - three + + ``` + --- a/file.txt + +++ b/file.txt + one + +AND + two + ``` + + Summary of what I did: + - one + - two + - three + + That's about it. 
+ "}; + + let mut events = Vec::new(); + let mut parser = DiffParser::new(diff); + while let Some(event) = parser.next().unwrap() { + events.push(event); + } + + assert_eq!( + events, + &[ + DiffEvent::Hunk { + path: "file.txt".into(), + hunk: Hunk { + context: "one\ntwo\n".into(), + edits: vec![Edit { + range: 4..4, + text: "AND\n".into() + }], + start_line: None, + }, + status: FileStatus::Modified, + }, + DiffEvent::FileEnd { renamed_to: None } + ], + ) + } + + #[test] + fn test_no_newline_at_eof() { + let diff = indoc! {" + --- a/file.py + +++ b/file.py + @@ -55,7 +55,3 @@ class CustomDataset(Dataset): + torch.set_rng_state(state) + mask = self.transform(mask) + + - if self.mode == 'Training': + - return (img, mask, name) + - else: + - return (img, mask, name) + \\ No newline at end of file + "}; + + let mut events = Vec::new(); + let mut parser = DiffParser::new(diff); + while let Some(event) = parser.next().unwrap() { + events.push(event); + } + + assert_eq!( + events, + &[ + DiffEvent::Hunk { + path: "file.py".into(), + hunk: Hunk { + context: concat!( + " torch.set_rng_state(state)\n", + " mask = self.transform(mask)\n", + "\n", + " if self.mode == 'Training':\n", + " return (img, mask, name)\n", + " else:\n", + " return (img, mask, name)", + ) + .into(), + edits: vec![Edit { + range: 80..203, + text: "".into() + }], + start_line: Some(54), // @@ -55,7 -> line 54 (0-indexed) + }, + status: FileStatus::Modified, + }, + DiffEvent::FileEnd { renamed_to: None } + ], + ); + } + + #[test] + fn test_no_newline_at_eof_addition() { + let diff = indoc! 
{" + --- a/file.txt + +++ b/file.txt + @@ -1,2 +1,3 @@ + context + -deleted + +added line + \\ No newline at end of file + "}; + + let mut events = Vec::new(); + let mut parser = DiffParser::new(diff); + while let Some(event) = parser.next().unwrap() { + events.push(event); + } + + assert_eq!( + events, + &[ + DiffEvent::Hunk { + path: "file.txt".into(), + hunk: Hunk { + context: "context\ndeleted\n".into(), + edits: vec![Edit { + range: 8..16, + text: "added line".into() + }], + start_line: Some(0), // @@ -1,2 -> line 0 (0-indexed) + }, + status: FileStatus::Modified, + }, + DiffEvent::FileEnd { renamed_to: None } + ], + ); + } + + #[test] + fn test_double_no_newline_at_eof() { + // Two consecutive "no newline" markers - the second should be ignored + let diff = indoc! {" + --- a/file.txt + +++ b/file.txt + @@ -1,3 +1,3 @@ + line1 + -old + +new + line3 + \\ No newline at end of file + \\ No newline at end of file + "}; + + let mut events = Vec::new(); + let mut parser = DiffParser::new(diff); + while let Some(event) = parser.next().unwrap() { + events.push(event); + } + + assert_eq!( + events, + &[ + DiffEvent::Hunk { + path: "file.txt".into(), + hunk: Hunk { + context: "line1\nold\nline3".into(), // Only one newline removed + edits: vec![Edit { + range: 6..10, // "old\n" is 4 bytes + text: "new\n".into() + }], + start_line: Some(0), + }, + status: FileStatus::Modified, + }, + DiffEvent::FileEnd { renamed_to: None } + ], + ); + } + + #[test] + fn test_no_newline_after_context_not_addition() { + // "No newline" after context lines should remove newline from context, + // not from an earlier addition + let diff = indoc! 
{" + --- a/file.txt + +++ b/file.txt + @@ -1,4 +1,4 @@ + line1 + -old + +new + line3 + line4 + \\ No newline at end of file + "}; + + let mut events = Vec::new(); + let mut parser = DiffParser::new(diff); + while let Some(event) = parser.next().unwrap() { + events.push(event); + } + + assert_eq!( + events, + &[ + DiffEvent::Hunk { + path: "file.txt".into(), + hunk: Hunk { + // newline removed from line4 (context), not from "new" (addition) + context: "line1\nold\nline3\nline4".into(), + edits: vec![Edit { + range: 6..10, // "old\n" is 4 bytes + text: "new\n".into() // Still has newline + }], + start_line: Some(0), + }, + status: FileStatus::Modified, + }, + DiffEvent::FileEnd { renamed_to: None } + ], + ); + } + + #[test] + fn test_strip_diff_metadata() { + let diff_with_metadata = indoc! {r#" + diff --git a/file.txt b/file.txt + index 1234567..abcdefg 100644 + --- a/file.txt + +++ b/file.txt + @@ -1,3 +1,4 @@ + context line + -removed line + +added line + more context + "#}; + + let stripped = strip_diff_metadata(diff_with_metadata); + + assert_eq!( + stripped, + indoc! {r#" + --- a/file.txt + +++ b/file.txt + @@ -1,3 +1,4 @@ + context line + -removed line + +added line + more context + "#} + ); + } + + #[test] + fn test_apply_diff_to_string_no_trailing_newline() { + // Text without trailing newline; diff generated without + // `\ No newline at end of file` marker. + let text = "line1\nline2\nline3"; + let diff = indoc! {" + --- a/file.txt + +++ b/file.txt + @@ -1,3 +1,3 @@ + line1 + -line2 + +replaced + line3 + "}; + + let result = apply_diff_to_string(diff, text).unwrap(); + assert_eq!(result, "line1\nreplaced\nline3"); + } + + #[test] + fn test_apply_diff_to_string_trailing_newline_present() { + // When text has a trailing newline, exact matching still works and + // the fallback is never needed. + let text = "line1\nline2\nline3\n"; + let diff = indoc! 
{" + --- a/file.txt + +++ b/file.txt + @@ -1,3 +1,3 @@ + line1 + -line2 + +replaced + line3 + "}; + + let result = apply_diff_to_string(diff, text).unwrap(); + assert_eq!(result, "line1\nreplaced\nline3\n"); + } + + #[test] + fn test_apply_diff_to_string_deletion_at_end_no_trailing_newline() { + // Deletion of the last line when text has no trailing newline. + // The edit range must be clamped so it doesn't index past the + // end of the text. + let text = "line1\nline2\nline3"; + let diff = indoc! {" + --- a/file.txt + +++ b/file.txt + @@ -1,3 +1,2 @@ + line1 + line2 + -line3 + "}; + + let result = apply_diff_to_string(diff, text).unwrap(); + assert_eq!(result, "line1\nline2\n"); + } + + #[test] + fn test_apply_diff_to_string_replace_last_line_no_trailing_newline() { + // Replace the last line when text has no trailing newline. + let text = "aaa\nbbb\nccc"; + let diff = indoc! {" + --- a/file.txt + +++ b/file.txt + @@ -1,3 +1,3 @@ + aaa + bbb + -ccc + +ddd + "}; + + let result = apply_diff_to_string(diff, text).unwrap(); + assert_eq!(result, "aaa\nbbb\nddd"); + } + + #[test] + fn test_apply_diff_to_string_multibyte_no_trailing_newline() { + // Multi-byte UTF-8 characters near the end; ensures char boundary + // safety when the fallback clamps edit ranges. + let text = "hello\n세계"; + let diff = indoc! {" + --- a/file.txt + +++ b/file.txt + @@ -1,2 +1,2 @@ + hello + -세계 + +world + "}; + + let result = apply_diff_to_string(diff, text).unwrap(); + assert_eq!(result, "hello\nworld"); + } + + #[test] + fn test_find_context_candidates_no_false_positive_mid_text() { + // The stripped fallback must only match at the end of text, not in + // the middle where a real newline exists. + let text = "aaa\nbbb\nccc\n"; + let mut hunk = Hunk { + context: "bbb\n".into(), + edits: vec![], + start_line: None, + }; + + let candidates = find_context_candidates(text, &mut hunk); + // Exact match at offset 4 — the fallback is not used. 
+ assert_eq!(candidates, vec![4]); + } + + #[test] + fn test_find_context_candidates_fallback_at_end() { + let text = "aaa\nbbb"; + let mut hunk = Hunk { + context: "bbb\n".into(), + edits: vec![], + start_line: None, + }; + + let candidates = find_context_candidates(text, &mut hunk); + assert_eq!(candidates, vec![4]); + // Context should be stripped. + assert_eq!(hunk.context, "bbb"); + } + + #[test] + fn test_find_context_candidates_no_fallback_mid_text() { + // "bbb" appears mid-text followed by a newline, so the exact + // match succeeds. Verify the stripped fallback doesn't produce a + // second, spurious candidate. + let text = "aaa\nbbb\nccc"; + let mut hunk = Hunk { + context: "bbb\nccc\n".into(), + edits: vec![], + start_line: None, + }; + + let candidates = find_context_candidates(text, &mut hunk); + // No exact match (text ends without newline after "ccc"), but the + // stripped context "bbb\nccc" matches at offset 4, which is the end. + assert_eq!(candidates, vec![4]); + assert_eq!(hunk.context, "bbb\nccc"); + } + + #[test] + fn test_find_context_candidates_clamps_edit_ranges() { + let text = "aaa\nbbb"; + let mut hunk = Hunk { + context: "aaa\nbbb\n".into(), + edits: vec![Edit { + range: 4..8, // "bbb\n" — end points at the trailing \n + text: "ccc\n".into(), + }], + start_line: None, + }; + + let candidates = find_context_candidates(text, &mut hunk); + assert_eq!(candidates, vec![0]); + // Edit range end should be clamped to 7 (new context length). 
+ assert_eq!(hunk.edits[0].range, 4..7); + } + + #[test] + fn test_unified_diff_with_context_matches_expected_context_window() { + let old_text = "line1\nline2\nline3\nline4\nline5\nCHANGE_ME\nline7\nline8\n"; + let new_text = "line1\nline2\nline3\nline4\nline5\nCHANGED\nline7\nline8\n"; + + let diff_default = unified_diff_with_context(old_text, new_text, 0, 0, 3); + assert_eq!( + diff_default, + "@@ -3,6 +3,6 @@\n line3\n line4\n line5\n-CHANGE_ME\n+CHANGED\n line7\n line8\n" + ); + + let diff_full_context = unified_diff_with_context(old_text, new_text, 0, 0, 8); + assert_eq!( + diff_full_context, + "@@ -1,8 +1,8 @@\n line1\n line2\n line3\n line4\n line5\n-CHANGE_ME\n+CHANGED\n line7\n line8\n" + ); + + let diff_no_context = unified_diff_with_context(old_text, new_text, 0, 0, 0); + assert_eq!(diff_no_context, "@@ -6,1 +6,1 @@\n-CHANGE_ME\n+CHANGED\n"); + } +} diff --git a/crates/zeta_prompt/src/zeta_prompt.rs b/crates/zeta_prompt/src/zeta_prompt.rs index e3aced7ed81d8bf3835a3e711e472651764a314e..49b86404a8ad49c27e29bb2b887fb3fc8171c35c 100644 --- a/crates/zeta_prompt/src/zeta_prompt.rs +++ b/crates/zeta_prompt/src/zeta_prompt.rs @@ -1,5 +1,6 @@ pub mod excerpt_ranges; pub mod multi_region; +pub mod udiff; use anyhow::{Result, anyhow}; use serde::{Deserialize, Serialize}; @@ -105,10 +106,19 @@ impl std::fmt::Display for ZetaFormat { impl ZetaFormat { pub fn parse(format_name: &str) -> Result { + let lower = format_name.to_lowercase(); + + // Exact case-insensitive match takes priority, bypassing ambiguity checks. 
+ for variant in ZetaFormat::iter() { + if <&'static str>::from(&variant).to_lowercase() == lower { + return Ok(variant); + } + } + let mut results = ZetaFormat::iter().filter(|version| { <&'static str>::from(version) .to_lowercase() - .contains(&format_name.to_lowercase()) + .contains(&lower) }); let Some(result) = results.next() else { anyhow::bail!( @@ -819,11 +829,146 @@ pub fn encode_patch_as_output_for_format( } } +/// Given a `ZetaPromptInput`, a format, and a patch (with cursor already +/// extracted), produce the expected model output string for training. +pub fn format_expected_output( + input: &ZetaPromptInput, + format: ZetaFormat, + patch: &str, + cursor_offset: Option, +) -> Result { + let (context, editable_range, _, _) = resolve_cursor_region(input, format); + let mut old_editable = context[editable_range].to_string(); + if !old_editable.is_empty() && !old_editable.ends_with('\n') { + old_editable.push('\n'); + } + + // Formats with their own output encoding (hashline, variable-edit, + // multi-region empty patches) are handled here. + if let Some(output) = + encode_patch_as_output_for_format(format, &old_editable, patch, cursor_offset)? + { + return Ok(output); + } + + let empty_patch = patch.lines().count() <= 3; + + match format { + // Multi-region formats: non-empty patches need diff application + // then marker-span encoding. 
+ ZetaFormat::V0316SeedMultiRegions => { + let (new_editable, first_hunk_offset) = + udiff::apply_diff_to_string_with_hunk_offset(patch, &old_editable)?; + let cursor_in_new = cursor_in_new_text(cursor_offset, first_hunk_offset, &new_editable); + multi_region::encode_from_old_and_new_v0316( + &old_editable, + &new_editable, + cursor_in_new, + CURSOR_MARKER, + multi_region::V0316_END_MARKER, + ) + } + ZetaFormat::V0318SeedMultiRegions => { + let (new_editable, first_hunk_offset) = + udiff::apply_diff_to_string_with_hunk_offset(patch, &old_editable)?; + let cursor_in_new = cursor_in_new_text(cursor_offset, first_hunk_offset, &new_editable); + multi_region::encode_from_old_and_new_v0318( + &old_editable, + &new_editable, + cursor_in_new, + CURSOR_MARKER, + multi_region::V0318_END_MARKER, + ) + } + ZetaFormat::V0317SeedMultiRegions => { + let (new_editable, first_hunk_offset) = + udiff::apply_diff_to_string_with_hunk_offset(patch, &old_editable)?; + let cursor_in_new = cursor_in_new_text(cursor_offset, first_hunk_offset, &new_editable); + multi_region::encode_from_old_and_new_v0317( + &old_editable, + &new_editable, + cursor_in_new, + CURSOR_MARKER, + multi_region::V0317_END_MARKER, + ) + } + // V0131-style formats and fallback: produce new editable text with + // cursor marker inserted, followed by the end marker. + _ => { + let (mut result, first_hunk_offset) = if empty_patch { + (old_editable.clone(), None) + } else { + udiff::apply_diff_to_string_with_hunk_offset(patch, &old_editable)? 
+ }; + + if let Some(cursor) = cursor_offset { + let hunk_start = if !empty_patch { + first_hunk_offset.unwrap_or(0) + } else { + 0 + }; + let offset = (hunk_start + cursor).min(result.len()); + result.insert_str(offset, CURSOR_MARKER); + } + + if !result.is_empty() && !result.ends_with('\n') { + result.push('\n'); + } + + if let Some(end_marker) = output_end_marker_for_format(format) { + result.push_str(end_marker); + } + + Ok(result) + } + } +} + +/// Compute the cursor position within the new text after diff application. +fn cursor_in_new_text( + cursor_offset: Option, + first_hunk_offset: Option, + new_text: &str, +) -> Option { + cursor_offset.map(|cursor| { + let hunk_start = first_hunk_offset.unwrap_or(0); + (hunk_start + cursor).min(new_text.len()) + }) +} + +#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)] pub struct ParsedOutput { /// Text that should replace the editable region pub new_editable_region: String, /// The byte range within `cursor_excerpt` that this replacement applies to pub range_in_excerpt: Range, + /// Byte offset of the cursor marker within `new_editable_region`, if present + pub cursor_offset_in_new_editable_region: Option, +} + +#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)] +pub struct CursorPosition { + pub path: String, + pub row: usize, + pub column: usize, + pub offset: usize, + pub editable_region_offset: usize, +} + +pub fn parsed_output_from_editable_region( + range_in_excerpt: Range, + mut new_editable_region: String, +) -> ParsedOutput { + let cursor_offset_in_new_editable_region = new_editable_region.find(CURSOR_MARKER); + if let Some(offset) = cursor_offset_in_new_editable_region { + new_editable_region.replace_range(offset..offset + CURSOR_MARKER.len(), ""); + } + + ParsedOutput { + new_editable_region, + range_in_excerpt, + cursor_offset_in_new_editable_region, + } } /// Parse model output for the given zeta format @@ -891,12 +1036,97 @@ pub fn parse_zeta2_model_output( let range_in_excerpt = 
range_in_context.start + context_start..range_in_context.end + context_start; - Ok(ParsedOutput { - new_editable_region: output, - range_in_excerpt, + Ok(parsed_output_from_editable_region(range_in_excerpt, output)) +} + +pub fn parse_zeta2_model_output_as_patch( + output: &str, + format: ZetaFormat, + prompt_inputs: &ZetaPromptInput, +) -> Result { + let parsed = parse_zeta2_model_output(output, format, prompt_inputs)?; + parsed_output_to_patch(prompt_inputs, parsed) +} + +pub fn cursor_position_from_parsed_output( + prompt_inputs: &ZetaPromptInput, + parsed: &ParsedOutput, +) -> Option { + let cursor_offset = parsed.cursor_offset_in_new_editable_region?; + let editable_region_offset = parsed.range_in_excerpt.start; + let excerpt = prompt_inputs.cursor_excerpt.as_ref(); + + let editable_region_start_line = excerpt[..editable_region_offset].matches('\n').count(); + + let new_editable_region = &parsed.new_editable_region; + let prefix_end = cursor_offset.min(new_editable_region.len()); + let new_region_prefix = &new_editable_region[..prefix_end]; + + let row = editable_region_start_line + new_region_prefix.matches('\n').count(); + + let column = match new_region_prefix.rfind('\n') { + Some(last_newline) => cursor_offset - last_newline - 1, + None => { + let content_prefix = &excerpt[..editable_region_offset]; + let content_column = match content_prefix.rfind('\n') { + Some(last_newline) => editable_region_offset - last_newline - 1, + None => editable_region_offset, + }; + content_column + cursor_offset + } + }; + + Some(CursorPosition { + path: prompt_inputs.cursor_path.to_string_lossy().into_owned(), + row, + column, + offset: editable_region_offset + cursor_offset, + editable_region_offset: cursor_offset, }) } +pub fn parsed_output_to_patch( + prompt_inputs: &ZetaPromptInput, + parsed: ParsedOutput, +) -> Result { + let range_in_excerpt = parsed.range_in_excerpt; + let excerpt = prompt_inputs.cursor_excerpt.as_ref(); + let old_text = 
excerpt[range_in_excerpt.clone()].to_string(); + let mut new_text = parsed.new_editable_region; + + let mut old_text_normalized = old_text; + if !new_text.is_empty() && !new_text.ends_with('\n') { + new_text.push('\n'); + } + if !old_text_normalized.is_empty() && !old_text_normalized.ends_with('\n') { + old_text_normalized.push('\n'); + } + + let editable_region_offset = range_in_excerpt.start; + let editable_region_start_line = excerpt[..editable_region_offset].matches('\n').count() as u32; + let editable_region_lines = old_text_normalized.lines().count() as u32; + + let diff = udiff::unified_diff_with_context( + &old_text_normalized, + &new_text, + editable_region_start_line, + editable_region_start_line, + editable_region_lines, + ); + + let path = prompt_inputs + .cursor_path + .to_string_lossy() + .trim_start_matches('/') + .to_string(); + let formatted_diff = format!("--- a/{path}\n+++ b/{path}\n{diff}"); + + Ok(udiff::encode_cursor_in_patch( + &formatted_diff, + parsed.cursor_offset_in_new_editable_region, + )) +} + pub fn excerpt_range_for_format( format: ZetaFormat, ranges: &ExcerptRanges, @@ -5292,6 +5522,33 @@ mod tests { assert_eq!(apply_edit(excerpt, &output1), "new content\n"); } + #[test] + fn test_parsed_output_to_patch_round_trips_through_udiff_application() { + let excerpt = "before ctx\nctx start\neditable old\nctx end\nafter ctx\n"; + let context_start = excerpt.find("ctx start").unwrap(); + let context_end = excerpt.find("after ctx").unwrap(); + let editable_start = excerpt.find("editable old").unwrap(); + let editable_end = editable_start + "editable old\n".len(); + let input = make_input_with_context_range( + excerpt, + editable_start..editable_end, + context_start..context_end, + editable_start, + ); + + let parsed = parse_zeta2_model_output( + "editable new\n>>>>>>> UPDATED\n", + ZetaFormat::V0131GitMergeMarkersPrefix, + &input, + ) + .unwrap(); + let expected = apply_edit(excerpt, &parsed); + let patch = parsed_output_to_patch(&input, 
parsed).unwrap(); + let patched = udiff::apply_diff_to_string(&patch, excerpt).unwrap(); + + assert_eq!(patched, expected); + } + #[test] fn test_special_tokens_not_triggered_by_comment_separator() { // Regression test for https://github.com/zed-industries/zed/issues/52489 diff --git a/docs/README.md b/docs/README.md index f03f008223ba1102585c34f3b98bf93a985c1284..38be153de34b7e32e410fa67710297cca653d699 100644 --- a/docs/README.md +++ b/docs/README.md @@ -4,12 +4,15 @@ Welcome to Zed's documentation. This is built on push to `main` and published automatically to [https://zed.dev/docs](https://zed.dev/docs). -To preview the docs locally you will need to install [mdBook](https://rust-lang.github.io/mdBook/) (`cargo install mdbook@0.4.40`) and then run: +To preview the docs locally you will need to install [mdBook](https://rust-lang.github.io/mdBook/) (`cargo install mdbook@0.4.40`), generate the action metadata, and then serve: ```sh +script/generate-action-metadata mdbook serve docs ``` +The first command dumps an action manifest to `crates/docs_preprocessor/actions.json`. Without it, the preprocessor cannot validate keybinding and action references in the docs and will report errors. You only need to re-run it when actions change. + It's important to note the version number above. For an unknown reason, as of 2025-04-23, running 0.4.48 will cause odd URL behavior that breaks things. 
Before committing, verify that the docs are formatted in the way Prettier expects with: diff --git a/docs/src/ai/agent-panel.md b/docs/src/ai/agent-panel.md index 2da2f37a67edea48e0c34b14cab1ec0fc81a522b..89b0126c55a12b08d4f21a01fea38758c4d509b7 100644 --- a/docs/src/ai/agent-panel.md +++ b/docs/src/ai/agent-panel.md @@ -67,7 +67,9 @@ Right-click on any agent response in the thread view to access a context menu wi ### Navigating the Thread {#navigating-the-thread} -In long conversations, use the scroll arrow buttons at the bottom of the panel to jump to your most recent prompt or to the very beginning of the thread. +In long conversations, use the scroll arrow buttons at the bottom of the panel to jump to your most recent prompt or to the very beginning of the thread. You can also scroll the thread using arrow keys, Page Up/Down, Home/End, and Shift+Page Up/Down to jump between messages, when the thread pane is focused. + +When focus is in the message editor, you can also use {#kb agent::ScrollOutputPageUp}, {#kb agent::ScrollOutputPageDown}, {#kb agent::ScrollOutputToTop}, {#kb agent::ScrollOutputToBottom}, {#kb agent::ScrollOutputLineUp}, and {#kb agent::ScrollOutputLineDown} to navigate the thread, or {#kb agent::ScrollOutputToPreviousMessage} and {#kb agent::ScrollOutputToNextMessage} to jump between your prompts. ### Navigating History {#navigating-history} diff --git a/docs/src/ai/agent-settings.md b/docs/src/ai/agent-settings.md index e1de9fba5e79d56ef73236b2e07c70c93819a2c7..28ee927e4ab4110e6e46a4a8d551093243d72a09 100644 --- a/docs/src/ai/agent-settings.md +++ b/docs/src/ai/agent-settings.md @@ -292,13 +292,16 @@ The default value is `false`. ### Sound Notification -Control whether to hear a notification sound when the agent is done generating changes or needs your input. -The default value is `false`. +Control whether to hear a notification sound when the agent is done generating changes or needs your input. The default value is `never`. 
+ +- `"never"` (default) — Never play the sound. +- `"when_hidden"` — Only play the sound when the agent panel is not visible. +- `"always"` — Always play the sound on completion. ```json [settings] { "agent": { - "play_sound_when_agent_done": true + "play_sound_when_agent_done": "never" } } ``` diff --git a/docs/src/extensions/developing-extensions.md b/docs/src/extensions/developing-extensions.md index 46bed8e223721be81806a3662752d3a4533ab173..01c16dc62be8b9be7e576bc1be10f20437acc993 100644 --- a/docs/src/extensions/developing-extensions.md +++ b/docs/src/extensions/developing-extensions.md @@ -173,8 +173,6 @@ git submodule add https://github.com/your-username/foobar-zed.git extensions/my- git add extensions/my-extension ``` -> **Note:** Your extension must live under te - > All extension submodules must use HTTPS URLs and not SSH URLS (`git@github.com`). 2. Add a new entry to the top-level `extensions.toml` file containing your extension: diff --git a/docs/src/extensions/languages.md b/docs/src/extensions/languages.md index c8e6958db683a5a3e2c9903c590f564b0ef4cb93..121357306e73552140f938197ffc466c0e489484 100644 --- a/docs/src/extensions/languages.md +++ b/docs/src/extensions/languages.md @@ -143,6 +143,21 @@ This query marks strings, object keys, and numbers for highlighting. The followi | @variable.parameter | Captures function/method parameters | | @variant | Captures variants | +#### Fallback captures + +A single Tree-sitter pattern can specify multiple captures on the same node to define fallback highlights. +Zed resolves them right-to-left: It first tries the rightmost capture, and if the current theme has no style for it, falls back to the next capture to the left, and so on. + +For example: + +```scheme +(type_identifier) @type @variable +``` + +Here Zed will first try to resolve `@variable` from the theme. If the theme defines a style for `@variable`, that style is used. Otherwise, Zed falls back to `@type`. 
+ +This is useful when a language wants to provide a preferred highlight that not all themes may support, while still falling back to a more common capture that most themes define. + ### Bracket matching The `brackets.scm` file defines matching brackets. diff --git a/docs/src/performance.md b/docs/src/performance.md index b8f76179e16fcf1f1b886a5c3ef00bcc85aa9ed4..d25ac246f3dbc03ba4286f8e130c566657bbf196 100644 --- a/docs/src/performance.md +++ b/docs/src/performance.md @@ -15,7 +15,7 @@ See [samply](https://github.com/mstange/samply)'s README on how to install and r The profile.json does not contain any symbols. Firefox profiler can add the local symbols to the profile for for. To do that hit the upload local profile button in the top right corner. -image +image # In depth CPU profiling (Tracing) @@ -53,20 +53,40 @@ Download the profiler: Open the profiler (tracy-profiler), you should see zed in the list of `Discovered clients` click it. -image +image Tracy is an incredibly powerful profiler which can do a lot however it's UI is not that friendly. This is not the place for an in depth guide to Tracy, I do however want to highlight one particular workflow that is helpful when figuring out why a piece of code is _sometimes_ slow. Here are the steps: 1. Click the flamechart button at the top. + +Click flamechart + 2. Click on a function that takes a lot of time. + +Click snapshot + 3. Expand the list of function calls by clicking on main thread. + +Click main thread + 4. Filter that list to the slower calls then click on one of the slow calls in the list + +Select the tail calls in the histogram to filter down the list of calls then click on one call + 5. Click zoom to zone to go to that specific function call in the timeline + +Click zoom to zone + 6. Scroll to zoom in and see more detail about the callers + +Scroll to zoom in + 7. Click on a caller to to get statistics on _it_. 
+Click on any of the zones to get statistics + While normally the blue bars in the Tracy timeline correspond to function calls they can time any part of a codebase. In the example below we have added an extra span "for block in edits" and added metadata to it: the block_height. You can do that like this: ```rust @@ -74,14 +94,6 @@ let span = ztracing::debug_span!("for block in edits", block_height = block.heig let _enter = span.enter(); // span guard, when this is dropped the span ends (and its duration is recorded) ``` -Click flamechart -Click snapshot -Click main thread -Select the tail calls in the histogram to filter down the list of calls then click on one call -Click zoom to zone -Scroll to zoom in -Click on any of the zones to get statistics - # Task/Async profiling Get a profile of the zed foreground executor and background executors. Check if diff --git a/docs/src/reference/all-settings.md b/docs/src/reference/all-settings.md index ce80fe78f4734135bd6bba0f3329a651059dbfdf..3c944e0807ff1a6b0cda46c3416ad4e2dbc5a279 100644 --- a/docs/src/reference/all-settings.md +++ b/docs/src/reference/all-settings.md @@ -3002,21 +3002,36 @@ If you wish to exclude certain hosts from using the proxy, set the `NO_PROXY` en ## Profiles -- Description: Configuration profiles that can be applied on top of existing settings +- Description: Configuration profiles that can be temporarily applied on top of existing settings or Zed's defaults. - Setting: `profiles` - Default: `{}` **Options** -Configuration object for defining settings profiles. Example: +Each profile is an object with the following optional fields: + +- `base`: What settings to start from before applying the profile's overrides. + - `"user"` (default): Apply on top of your current user settings. + - `"default"`: Apply on top of Zed's default settings, ignoring user customizations. +- `settings`: The settings overrides for this profile. 
+ +Examples: ```json [settings] { "profiles": { - "presentation": { - "buffer_font_size": 20, - "ui_font_size": 18, - "theme": "One Light" + "Presentation": { + "settings": { + "buffer_font_size": 20, + "ui_font_size": 18, + "theme": "One Light" + } + }, + "Clean Slate": { + "base": "default", + "settings": { + "theme": "Ayu Dark" + } } } } @@ -5332,12 +5347,12 @@ For example, to use `Nerd Font` as a fallback, add the following to your setting ## Settings Profiles -- Description: Configure any number of settings profiles that are temporarily applied on top of your existing user settings when selected from `settings profile selector: toggle`. +- Description: Configure any number of settings profiles that are temporarily applied when selected from `settings profile selector: toggle`. - Setting: `profiles` - Default: `{}` In your `settings.json` file, add the `profiles` object. -Each key within this object is the name of a settings profile, and each value is an object that can include any of Zed's settings. +Each key within this object is the name of a settings profile. Each profile has an optional `base` field (`"user"` or `"default"`) and a `settings` object containing any of Zed's settings. 
Example: @@ -5345,24 +5360,30 @@ Example: { "profiles": { "Presenting (Dark)": { - "agent_buffer_font_size": 18.0, - "buffer_font_size": 18.0, - "theme": "One Dark", - "ui_font_size": 18.0 + "settings": { + "agent_buffer_font_size": 18.0, + "buffer_font_size": 18.0, + "theme": "One Dark", + "ui_font_size": 18.0 + } }, "Presenting (Light)": { - "agent_buffer_font_size": 18.0, - "buffer_font_size": 18.0, - "theme": "One Light", - "ui_font_size": 18.0 + "settings": { + "agent_buffer_font_size": 18.0, + "buffer_font_size": 18.0, + "theme": "One Light", + "ui_font_size": 18.0 + } }, "Writing": { - "agent_buffer_font_size": 15.0, - "buffer_font_size": 15.0, - "theme": "Catppuccin Frappé - No Italics", - "ui_font_size": 15.0, - "tab_bar": { "show": false }, - "toolbar": { "breadcrumbs": false } + "settings": { + "agent_buffer_font_size": 15.0, + "buffer_font_size": 15.0, + "theme": "Catppuccin Frappé - No Italics", + "ui_font_size": 15.0, + "tab_bar": { "show": false }, + "toolbar": { "breadcrumbs": false } + } } } } diff --git a/docs/src/tasks.md b/docs/src/tasks.md index b4c9ba8a2abf5ce03e4a9a43fe7fc7e55f9240a4..3bbef85e9760ad036b75d50f26d3536b2e5b20f1 100644 --- a/docs/src/tasks.md +++ b/docs/src/tasks.md @@ -53,9 +53,9 @@ Zed supports ways to spawn (and rerun) commands using its integrated [terminal]( "show_command": true, // Which edited buffers to save before running the task: // * `all` — save all edited buffers - // * `current` — save current buffer only + // * `current` — save currently active buffer only // * `none` — don't save any buffers - "save": "all" + "save": "none" // Represents the tags for inline runnable indicators, or spawning multiple tasks at once. 
// "tags": [] } diff --git a/docs/src/troubleshooting.md b/docs/src/troubleshooting.md index a852ce779cdb0b719a56e3b12d68ee9b2baab6b7..0ec95cd55e0d127e82430670de9290ec793deb5d 100644 --- a/docs/src/troubleshooting.md +++ b/docs/src/troubleshooting.md @@ -45,13 +45,13 @@ Xcode Instruments (which comes bundled with your [Xcode](https://apps.apple.com/ 1. With Zed running, open Instruments 1. Select `Time Profiler` as the profiling template - ![Instruments template picker with Time Profiler selected](https://images.zed.dev/troubleshooting/instruments-template-picker.webp) + ![Instruments template picker with Time Profiler selected](https://images.zed.dev/docs/troubleshooting/instruments-template-picker.webp) 1. In the `Time Profiler` configuration, set the target to the running Zed process 1. Start recording - ![Time Profiler configuration showing the target dropdown and record button](https://images.zed.dev/troubleshooting/instruments-target-and-record.webp) + ![Time Profiler configuration showing the target dropdown and record button](https://images.zed.dev/docs/troubleshooting/instruments-target-and-record.webp) 1. Perform the action in Zed that causes performance issues 1. Stop recording - ![A completed Time Profiler recording in Instruments](https://images.zed.dev/troubleshooting/instruments-recording.webp) + ![A completed Time Profiler recording in Instruments](https://images.zed.dev/docs/troubleshooting/instruments-recording.webp) 1. Save the trace file 1. Compress the trace file into a zip archive 1. File a [GitHub issue](https://github.com/zed-industries/zed/issues/new/choose) with the trace zip attached diff --git a/docs/src/vim.md b/docs/src/vim.md index 1798f16a93244f2694b30ffa70119da1e4498fdc..8e93edff081681a3e094c811e2d76822766ef67e 100644 --- a/docs/src/vim.md +++ b/docs/src/vim.md @@ -562,6 +562,7 @@ You can change the following settings to modify vim mode's behavior: | use_system_clipboard | Determines how system clipboard is used:
  • "always": use for all operations
  • "never": only use when explicitly specified
  • "on_yank": use for yank operations
| "always" | | use_multiline_find | deprecated | | use_smartcase_find | If `true`, `f` and `t` motions are case-insensitive when the target letter is lowercase. | false | +| use_regex_search | If `true`, then vim search will use regex mode | true | | gdefault | If `true`, the `:substitute` command replaces all matches in a line by default (as if `g` flag was given). The `g` flag then toggles this, replacing only the first match. | false | | toggle_relative_line_numbers | If `true`, line numbers are relative in normal mode and absolute in insert mode, giving you the best of both options. | false | | custom_digraphs | An object that allows you to add custom digraphs. Read below for an example. | {} | @@ -587,6 +588,7 @@ Here's an example of these settings changed: "default_mode": "insert", "use_system_clipboard": "never", "use_smartcase_find": true, + "use_regex_search": true, "gdefault": true, "toggle_relative_line_numbers": true, "highlight_on_yank_duration": 50, diff --git a/docs/theme/css/general.css b/docs/theme/css/general.css index f63fd24d1379aa3f325ba53a92784ba256a0dd97..9c8077bad525da1b7c15572d6fc154b66602e987 100644 --- a/docs/theme/css/general.css +++ b/docs/theme/css/general.css @@ -70,10 +70,21 @@ h5, h6 { position: relative; font-family: var(--title-font); - font-weight: 480; + font-weight: 400; +} + +h1 { color: var(--title-color); } +h2, +h3, +h4, +h5, +h6 { + color: var(--full-contrast); +} + /* Don't change font size in headers. 
*/ h1 code, h2 code, @@ -213,7 +224,7 @@ hr { } .content { - padding: 48px 32px 0 32px; + padding: 32px 32px 0 32px; display: flex; justify-content: space-between; gap: 36px; @@ -272,10 +283,14 @@ hr { border-radius: 8px; overflow: clip; } -.content .header:link, -.content .header:visited { +.content h1 .header:link, +.content h1 .header:visited { color: var(--title-color); } +.content :is(h2, h3, h4, h5, h6) .header:link, +.content :is(h2, h3, h4, h5, h6) .header:visited { + color: var(--full-contrast); +} .content .header:link, .content .header:visited:hover { text-decoration: none; @@ -383,15 +398,17 @@ blockquote .warning:before { } kbd { - background-color: rgba(8, 76, 207, 0.1); + background-color: var(--keybinding-bg); + padding: 4px 4px 6px 4px; border-radius: 4px; + font-family: var(--mono-font); + display: inline-block; + margin: 0 2px; border: solid 1px var(--popover-border); box-shadow: inset 0 -1px 0 var(--theme-hover); - display: inline-block; font-size: var(--code-font-size); - font-family: var(--mono-font); + color: var(--full-contrast); line-height: 10px; - padding: 4px 5px; vertical-align: middle; } diff --git a/docs/theme/css/variables.css b/docs/theme/css/variables.css index 46ea739daf8643db5ad57a239091e557df2a3d0c..ca43e6feb4a17d67ce0a6140ba1459569bb6e33f 100644 --- a/docs/theme/css/variables.css +++ b/docs/theme/css/variables.css @@ -11,11 +11,12 @@ --page-padding: 15px; --content-max-width: 690px; --menu-bar-height: 64px; - --font: "IA Writer Quattro S", sans-serif; - --title-font: "Lora", "Helvetica Neue", Helvetica, Arial, sans-serif; + --font: "iA Writer Quattro S", sans-serif; + --title-font: + "IBM Plex Serif", "Helvetica Neue", Helvetica, Arial, sans-serif; --mono-font: - ui-monospace, SFMono-Regular, Menlo, Monaco, Consolas, Liberation Mono, - Courier New, monospace; + "Lilex", ui-monospace, SFMono-Regular, Menlo, Monaco, Consolas, + Liberation Mono, Courier New, monospace; --code-font-size: 0.875em /* please adjust the ace font size 
accordingly in editor.js */; @@ -151,7 +152,7 @@ --inline-code-color: hsl(40, 100%, 80%); --code-text: hsl(220, 13%, 95%); --code-bg: hsl(220, 93%, 50%, 0.2); - --keybinding-bg: hsl(0, 0%, 12%); + --keybinding-bg: hsl(220, 20%, 10%); --pre-bg: hsl(220, 13%, 5%); --pre-border: hsla(220, 93%, 70%, 0.3); @@ -162,7 +163,7 @@ --popover-shadow: 0 10px 15px -3px hsl(0, 0%, 0%, 0.1), 0 4px 6px -4px hsl(0, 0%, 0%, 0.1); - --theme-hover: hsl(220, 13%, 25%); + --theme-hover: hsl(220, 13%, 20%); --hover-section-title: hsl(220, 13%, 11%); --quote-bg: hsl(220, 13%, 25%, 0.4); diff --git a/docs/theme/fonts/Lora.var.woff2 b/docs/theme/fonts/Lora.var.woff2 deleted file mode 100644 index e2d8990a7ee9fe1f2b02c5d9c23b1e8e13e14de9..0000000000000000000000000000000000000000 Binary files a/docs/theme/fonts/Lora.var.woff2 and /dev/null differ diff --git a/docs/theme/fonts/fonts.css b/docs/theme/fonts/fonts.css index f55cb6ee898ad7c346d7e1774323a70e1fda001f..49a3bd666476efc571f483c5170e882e7e2c436c 100644 --- a/docs/theme/fonts/fonts.css +++ b/docs/theme/fonts/fonts.css @@ -3,15 +3,37 @@ /* open-sans-300 - latin_vietnamese_latin-ext_greek-ext_greek_cyrillic-ext_cyrillic */ @font-face { - font-family: "IA Writer Quattro S"; + font-family: "iA Writer Quattro S"; + src: url("https://cdn.zed.dev/fonts/iAWriterQuattroV.woff2") + format("woff2-variations"); + font-weight: 100 900; font-style: normal; - font-weight: 400; - src: url("iAWriterQuattroS-Regular.woff2") format("woff2"); + font-display: swap; } @font-face { - font-family: "Lora"; - src: url("Lora.var.woff2") format("woff2-variations"); + font-family: "iA Writer Quattro S"; + src: url("https://cdn.zed.dev/fonts/iAWriterQuattroV-Italic.woff2") + format("woff2-variations"); font-weight: 100 900; + font-style: italic; + font-display: swap; +} + +@font-face { + font-family: "IBM Plex Serif"; + src: url("https://cdn.zed.dev/fonts/IBMPlexSerif-Var.woff2") + format("woff2-variations"); + font-weight: 400 700; + font-style: normal; + 
font-display: swap; +} + +@font-face { + font-family: "Lilex"; + src: url("https://cdn.zed.dev/fonts/Lilex-Regular.woff2") + format("woff2-variations"); + font-weight: 400; font-style: normal; + font-display: swap; } diff --git a/docs/theme/fonts/iAWriterQuattroS-Regular.woff2 b/docs/theme/fonts/iAWriterQuattroS-Regular.woff2 deleted file mode 100644 index a25cdbcdd3f2127e7c2f6d0fe2832a83ae2fc6e5..0000000000000000000000000000000000000000 Binary files a/docs/theme/fonts/iAWriterQuattroS-Regular.woff2 and /dev/null differ diff --git a/docs/theme/page-toc.css b/docs/theme/page-toc.css index 6a16265976c8c9d8861c2791206464f1bcb4ceec..6f88ccc429eb6f29015c26722f2b9cce49807008 100644 --- a/docs/theme/page-toc.css +++ b/docs/theme/page-toc.css @@ -5,7 +5,7 @@ display: flex; flex-direction: column; gap: 4px; - padding: 28px 0 120px 0; + padding: 16px 0 120px 0; width: 200px; max-height: calc(100svh - 50px); overflow-x: hidden; diff --git a/docs/theme/plugins.css b/docs/theme/plugins.css index 8c9f0c438e8e1ecd43cd770183d0a6a3bbfe0a4f..ef59e97072bd2c2a6e580afca79bbe3dafa37f6b 100644 --- a/docs/theme/plugins.css +++ b/docs/theme/plugins.css @@ -1,8 +1,8 @@ kbd.keybinding { background-color: var(--keybinding-bg); - padding: 2px 4px; - border-radius: 3px; - font-family: monospace; + padding: 4px 4px 6px 4px; + border-radius: 4px; + font-family: var(--mono-font); display: inline-block; margin: 0 2px; } diff --git a/extensions/glsl/Cargo.toml b/extensions/glsl/Cargo.toml index 5d7b6ce941c14f68410ac33f825d0ee0b645d6b5..a02c93c0387424255fa32abf8fb027e2d923b809 100644 --- a/extensions/glsl/Cargo.toml +++ b/extensions/glsl/Cargo.toml @@ -1,6 +1,6 @@ [package] name = "zed_glsl" -version = "0.2.2" +version = "0.2.3" edition.workspace = true publish.workspace = true license = "Apache-2.0" diff --git a/extensions/glsl/extension.toml b/extensions/glsl/extension.toml index f866091b84674780e859407ebd893641a3a159ce..1fcc888ebdfc14b1fb94d136c99e2ef6b7008b94 100644 --- 
a/extensions/glsl/extension.toml +++ b/extensions/glsl/extension.toml @@ -1,7 +1,7 @@ id = "glsl" name = "GLSL" description = "GLSL support." -version = "0.2.2" +version = "0.2.3" schema_version = 1 authors = ["Mikayla Maki "] repository = "https://github.com/zed-industries/zed" diff --git a/extensions/glsl/languages/glsl/injections.scm b/extensions/glsl/languages/glsl/injections.scm new file mode 100644 index 0000000000000000000000000000000000000000..2f0e58eb6431515b86b6042e5828263341513e99 --- /dev/null +++ b/extensions/glsl/languages/glsl/injections.scm @@ -0,0 +1,2 @@ +((comment) @injection.content + (#set! injection.language "comment")) diff --git a/extensions/proto/Cargo.toml b/extensions/proto/Cargo.toml index 68a524ed944b0db1fd75b9ec5ca5e0b1aa99e89f..5ca9720e25fb7cb115004d0de7c47e45d7e6252a 100644 --- a/extensions/proto/Cargo.toml +++ b/extensions/proto/Cargo.toml @@ -1,6 +1,6 @@ [package] name = "zed_proto" -version = "0.3.1" +version = "0.3.2" edition.workspace = true publish.workspace = true license = "Apache-2.0" diff --git a/extensions/proto/extension.toml b/extensions/proto/extension.toml index 70ebed1ca50635d9e818ce216920937a547b64c4..42985998e4dc934f9b6860ee0a5778a097d5723a 100644 --- a/extensions/proto/extension.toml +++ b/extensions/proto/extension.toml @@ -1,7 +1,7 @@ id = "proto" name = "Proto" description = "Protocol Buffers support." -version = "0.3.1" +version = "0.3.2" schema_version = 1 authors = ["Zed Industries "] repository = "https://github.com/zed-industries/zed" diff --git a/extensions/proto/languages/proto/injections.scm b/extensions/proto/languages/proto/injections.scm new file mode 100644 index 0000000000000000000000000000000000000000..2f0e58eb6431515b86b6042e5828263341513e99 --- /dev/null +++ b/extensions/proto/languages/proto/injections.scm @@ -0,0 +1,2 @@ +((comment) @injection.content + (#set! 
injection.language "comment")) diff --git a/flake.lock b/flake.lock index 4228411894ebc0472e1a2c7fbc0656eb73c5dfe4..c32629aedd533082e43ea3667f1b9cdc6dccfd1b 100644 --- a/flake.lock +++ b/flake.lock @@ -2,11 +2,11 @@ "nodes": { "crane": { "locked": { - "lastModified": 1774313767, - "narHash": "sha256-hy0XTQND6avzGEUFrJtYBBpFa/POiiaGBr2vpU6Y9tY=", + "lastModified": 1769737823, + "narHash": "sha256-DrBaNpZ+sJ4stXm+0nBX7zqZT9t9P22zbk6m5YhQxS4=", "owner": "ipetkov", "repo": "crane", - "rev": "3d9df76e29656c679c744968b17fbaf28f0e923d", + "rev": "b2f45c3830aa96b7456a4c4bc327d04d7a43e1ba", "type": "github" }, "original": { @@ -20,11 +20,11 @@ "nixpkgs-lib": "nixpkgs-lib" }, "locked": { - "lastModified": 1772408722, - "narHash": "sha256-rHuJtdcOjK7rAHpHphUb1iCvgkU3GpfvicLMwwnfMT0=", + "lastModified": 1769996383, + "narHash": "sha256-AnYjnFWgS49RlqX7LrC4uA+sCCDBj0Ry/WOJ5XWAsa0=", "owner": "hercules-ci", "repo": "flake-parts", - "rev": "f20dc5d9b8027381c474144ecabc9034d6a839a3", + "rev": "57928607ea566b5db3ad13af0e57e921e6b12381", "type": "github" }, "original": { @@ -35,11 +35,11 @@ }, "nixpkgs": { "locked": { - "lastModified": 1774709303, - "narHash": "sha256-D3Q07BbIA2KnTcSXIqqu9P586uWxN74zNoCH3h2ESHg=", + "lastModified": 1769789167, + "narHash": "sha256-kKB3bqYJU5nzYeIROI82Ef9VtTbu4uA3YydSk/Bioa8=", "owner": "NixOS", "repo": "nixpkgs", - "rev": "8110df5ad7abf5d4c0f6fb0f8f978390e77f9685", + "rev": "62c8382960464ceb98ea593cb8321a2cf8f9e3e5", "type": "github" }, "original": { @@ -51,11 +51,11 @@ }, "nixpkgs-lib": { "locked": { - "lastModified": 1772328832, - "narHash": "sha256-e+/T/pmEkLP6BHhYjx6GmwP5ivonQQn0bJdH9YrRB+Q=", + "lastModified": 1769909678, + "narHash": "sha256-cBEymOf4/o3FD5AZnzC3J9hLbiZ+QDT/KDuyHXVJOpM=", "owner": "nix-community", "repo": "nixpkgs.lib", - "rev": "c185c7a5e5dd8f9add5b2f8ebeff00888b070742", + "rev": "72716169fe93074c333e8d0173151350670b824c", "type": "github" }, "original": { @@ -79,11 +79,11 @@ ] }, "locked": { - "lastModified": 1774840424, - 
"narHash": "sha256-3Oi4mBKzOCFQYLUyEjyc0s5cnlNj1MzmhpVKoLptpe8=", + "lastModified": 1775013181, + "narHash": "sha256-zPrt6oNM1r/RO5bWYaZ3hthfG9vzkr6kQdoqDd5x4Qw=", "owner": "oxalica", "repo": "rust-overlay", - "rev": "d9f52b51548e76ab8b6e7d647763047ebdec835c", + "rev": "e8046c1d9ccadd497c2344d8fa49dab62f22f7be", "type": "github" }, "original": { diff --git a/nix/build.nix b/nix/build.nix index 9270abbe6f747e0ed78400d13561eadd97edd184..2f283f83a4d8b215d12933178f1e9b3b33617067 100644 --- a/nix/build.nix +++ b/nix/build.nix @@ -38,6 +38,8 @@ libxfixes, libxkbcommon, libxrandr, + libx11, + libxcb, nodejs_22, openssl, perl, @@ -181,8 +183,8 @@ let wayland gpu-lib libglvnd - xorg.libX11 - xorg.libxcb + libx11 + libxcb libdrm libgbm libva diff --git a/nix/livekit-libwebrtc/package.nix b/nix/livekit-libwebrtc/package.nix index 4c0d99926200e619b567cf7a90549f4f882eda42..2a07f5c2170e2db00eb1547b2b820e015f8683ff 100644 --- a/nix/livekit-libwebrtc/package.nix +++ b/nix/livekit-libwebrtc/package.nix @@ -37,6 +37,8 @@ libxfixes, libxrandr, libxtst, + libx11, + libxi, pipewire, xorg, }: @@ -224,8 +226,8 @@ stdenv.mkDerivation { libxrandr libxtst pipewire - xorg.libX11 - xorg.libXi + libx11 + libxi ]); preConfigure = '' diff --git a/script/docs-suggest-publish b/script/docs-suggest-publish index 23578785159b5fd720e84d3658f7f76dddf3ada9..fc420f3fbc774df0dbd7667a5cd6dd76682e9548 100755 --- a/script/docs-suggest-publish +++ b/script/docs-suggest-publish @@ -131,14 +131,14 @@ if [[ "$DRY_RUN" == "true" ]]; then echo "Would auto-apply suggestions to docs via Droid and create a draft PR." echo "Model: $MODEL" echo "" - + # Show each suggestion file for file in $(echo "$MANIFEST" | jq -r '.suggestions[].file'); do echo "--- $file ---" git show "origin/$SUGGESTIONS_BRANCH:$file" 2>/dev/null || echo "(file not found)" echo "" done - + echo -e "${YELLOW}=== END DRY RUN ===${NC}" echo "" echo "Run without --dry-run to create the PR." 
@@ -213,7 +213,7 @@ fi FLAGGED_PRS=() FLAGS_FILE="$REPO_ROOT/crates/feature_flags/src/flags.rs" if [[ -f "$FLAGS_FILE" ]]; then - # Extract feature flag struct names (e.g. SubagentsFeatureFlag, GitGraphFeatureFlag) + # Extract feature flag struct names (e.g. SubagentsFeatureFlag) FLAG_NAMES=$(grep -oE 'pub struct \w+FeatureFlag' "$FLAGS_FILE" | awk '{print $3}') if [[ -n "$FLAG_NAMES" ]]; then FLAG_PATTERN=$(echo "$FLAG_NAMES" | tr '\n' '|' | sed 's/|$//') @@ -538,10 +538,10 @@ echo -e "${GREEN}PR created:${NC} $PR_URL" if [[ "$KEEP_QUEUE" != "true" ]]; then echo "" echo "Resetting suggestions queue..." - + git checkout --orphan "${SUGGESTIONS_BRANCH}-reset" git rm -rf . > /dev/null 2>&1 || true - + cat > README.md << 'EOF' # Documentation Suggestions Queue @@ -562,19 +562,19 @@ run `script/docs-suggest-publish` to create a documentation PR from these sugges 3. At preview release, suggestions are collected into a docs PR 4. After docs PR is created, this branch is reset EOF - + mkdir -p suggestions echo '{"suggestions":[]}' > manifest.json git add README.md suggestions manifest.json git commit -m "Reset documentation suggestions queue Previous suggestions published in: $PR_URL" - + # Force push required: replacing the orphan suggestions branch with a clean slate git push -f origin "${SUGGESTIONS_BRANCH}-reset:$SUGGESTIONS_BRANCH" git checkout "$ORIGINAL_BRANCH" git branch -D "${SUGGESTIONS_BRANCH}-reset" - + echo "Suggestions queue reset." 
else git checkout "$ORIGINAL_BRANCH" diff --git a/tooling/compliance/Cargo.toml b/tooling/compliance/Cargo.toml new file mode 100644 index 0000000000000000000000000000000000000000..9b1ade359daa4b7a02beff861c94e01fff071f84 --- /dev/null +++ b/tooling/compliance/Cargo.toml @@ -0,0 +1,38 @@ +[package] +name = "compliance" +version = "0.1.0" +edition.workspace = true +publish.workspace = true +license = "GPL-3.0-or-later" + +[lints] +workspace = true + +[features] +octo-client = ["dep:octocrab", "dep:jsonwebtoken", "dep:futures", "dep:tokio"] + +[dependencies] +anyhow.workspace = true +async-trait.workspace = true +derive_more.workspace = true +futures = { workspace = true, optional = true } +itertools.workspace = true +jsonwebtoken = { version = "10.2", features = ["use_pem"], optional = true } +octocrab = { version = "0.49", default-features = false, features = [ + "default-client", + "jwt-aws-lc-rs", + "retry", + "rustls", + "rustls-aws-lc-rs", + "stream", + "timeout" +], optional = true } +regex.workspace = true +semver.workspace = true +serde.workspace = true +serde_json.workspace = true +tokio = { workspace = true, optional = true } + +[dev-dependencies] +indoc.workspace = true +tokio = { workspace = true, features = ["rt", "macros"] } diff --git a/tooling/compliance/LICENSE-GPL b/tooling/compliance/LICENSE-GPL new file mode 120000 index 0000000000000000000000000000000000000000..89e542f750cd3860a0598eff0dc34b56d7336dc4 --- /dev/null +++ b/tooling/compliance/LICENSE-GPL @@ -0,0 +1 @@ +../../LICENSE-GPL \ No newline at end of file diff --git a/tooling/compliance/src/checks.rs b/tooling/compliance/src/checks.rs new file mode 100644 index 0000000000000000000000000000000000000000..a0623fbbbc179edf9f5b6d777b3116ff498f0265 --- /dev/null +++ b/tooling/compliance/src/checks.rs @@ -0,0 +1,647 @@ +use std::{fmt, ops::Not as _}; + +use itertools::Itertools as _; + +use crate::{ + git::{CommitDetails, CommitList}, + github::{ + CommitAuthor, GitHubClient, GitHubUser, 
GithubLogin, PullRequestComment, PullRequestData, + PullRequestReview, ReviewState, + }, + report::Report, +}; + +const ZED_ZIPPY_COMMENT_APPROVAL_PATTERN: &str = "@zed-zippy approve"; +const ZED_ZIPPY_GROUP_APPROVAL: &str = "@zed-industries/approved"; + +#[derive(Debug)] +pub enum ReviewSuccess { + ApprovingComment(Vec), + CoAuthored(Vec), + ExternalMergedContribution { merged_by: GitHubUser }, + PullRequestReviewed(Vec), +} + +impl ReviewSuccess { + pub(crate) fn reviewers(&self) -> anyhow::Result { + let reviewers = match self { + Self::CoAuthored(authors) => authors.iter().map(ToString::to_string).collect_vec(), + Self::PullRequestReviewed(reviews) => reviews + .iter() + .filter_map(|review| review.user.as_ref()) + .map(|user| format!("@{}", user.login)) + .collect_vec(), + Self::ApprovingComment(comments) => comments + .iter() + .map(|comment| format!("@{}", comment.user.login)) + .collect_vec(), + Self::ExternalMergedContribution { merged_by } => { + vec![format!("@{}", merged_by.login)] + } + }; + + let reviewers = reviewers.into_iter().unique().collect_vec(); + + reviewers + .is_empty() + .not() + .then(|| reviewers.join(", ")) + .ok_or_else(|| anyhow::anyhow!("Expected at least one reviewer")) + } +} + +impl fmt::Display for ReviewSuccess { + fn fmt(&self, formatter: &mut fmt::Formatter<'_>) -> fmt::Result { + match self { + Self::CoAuthored(_) => formatter.write_str("Co-authored by an organization member"), + Self::PullRequestReviewed(_) => { + formatter.write_str("Approved by an organization review") + } + Self::ApprovingComment(_) => { + formatter.write_str("Approved by an organization approval comment") + } + Self::ExternalMergedContribution { .. 
} => { + formatter.write_str("External merged contribution") + } + } + } +} + +#[derive(Debug)] +pub enum ReviewFailure { + // todo: We could still query the GitHub API here to search for one + NoPullRequestFound, + Unreviewed, + UnableToDetermineReviewer, + Other(anyhow::Error), +} + +impl fmt::Display for ReviewFailure { + fn fmt(&self, formatter: &mut fmt::Formatter<'_>) -> fmt::Result { + match self { + Self::NoPullRequestFound => formatter.write_str("No pull request found"), + Self::Unreviewed => formatter + .write_str("No qualifying organization approval found for the pull request"), + Self::UnableToDetermineReviewer => formatter.write_str("Could not determine reviewer"), + Self::Other(error) => write!(formatter, "Failed to inspect review state: {error}"), + } + } +} + +pub(crate) type ReviewResult = Result; + +impl> From for ReviewFailure { + fn from(err: E) -> Self { + Self::Other(anyhow::anyhow!(err)) + } +} + +pub struct Reporter<'a> { + commits: CommitList, + github_client: &'a GitHubClient, +} + +impl<'a> Reporter<'a> { + pub fn new(commits: CommitList, github_client: &'a GitHubClient) -> Self { + Self { + commits, + github_client, + } + } + + /// Method that checks every commit for compliance + async fn check_commit(&self, commit: &CommitDetails) -> Result { + let Some(pr_number) = commit.pr_number() else { + return Err(ReviewFailure::NoPullRequestFound); + }; + + let pull_request = self.github_client.get_pull_request(pr_number).await?; + + if let Some(approval) = self.check_pull_request_approved(&pull_request).await? { + return Ok(approval); + } + + if let Some(approval) = self + .check_approving_pull_request_comment(&pull_request) + .await? + { + return Ok(approval); + } + + if let Some(approval) = self.check_commit_co_authors(commit).await? { + return Ok(approval); + } + + // if let Some(approval) = self.check_external_merged_pr(pr_number).await? 
{ + // return Ok(approval); + // } + + Err(ReviewFailure::Unreviewed) + } + + async fn check_commit_co_authors( + &self, + commit: &CommitDetails, + ) -> Result, ReviewFailure> { + if commit.co_authors().is_some() + && let Some(commit_authors) = self + .github_client + .get_commit_authors([commit.sha()]) + .await? + .get(commit.sha()) + .and_then(|authors| authors.co_authors()) + { + let mut org_co_authors = Vec::new(); + for co_author in commit_authors { + if let Some(github_login) = co_author.user() + && self + .github_client + .check_org_membership(github_login) + .await? + { + org_co_authors.push(co_author.clone()); + } + } + + Ok(org_co_authors + .is_empty() + .not() + .then_some(ReviewSuccess::CoAuthored(org_co_authors))) + } else { + Ok(None) + } + } + + #[allow(unused)] + async fn check_external_merged_pr( + &self, + pull_request: PullRequestData, + ) -> Result, ReviewFailure> { + if let Some(user) = pull_request.user + && self + .github_client + .check_org_membership(&GithubLogin::new(user.login)) + .await? + .not() + { + pull_request.merged_by.map_or( + Err(ReviewFailure::UnableToDetermineReviewer), + |merged_by| { + Ok(Some(ReviewSuccess::ExternalMergedContribution { + merged_by, + })) + }, + ) + } else { + Ok(None) + } + } + + async fn check_pull_request_approved( + &self, + pull_request: &PullRequestData, + ) -> Result, ReviewFailure> { + let pr_reviews = self + .github_client + .get_pull_request_reviews(pull_request.number) + .await?; + + if !pr_reviews.is_empty() { + let mut org_approving_reviews = Vec::new(); + for review in pr_reviews { + if let Some(github_login) = review.user.as_ref() + && pull_request + .user + .as_ref() + .is_none_or(|pr_user| pr_user.login != github_login.login) + && review + .state + .is_some_and(|state| state == ReviewState::Approved) + && self + .github_client + .check_org_membership(&GithubLogin::new(github_login.login.clone())) + .await? 
+ { + org_approving_reviews.push(review); + } + } + + Ok(org_approving_reviews + .is_empty() + .not() + .then_some(ReviewSuccess::PullRequestReviewed(org_approving_reviews))) + } else { + Ok(None) + } + } + + async fn check_approving_pull_request_comment( + &self, + pull_request: &PullRequestData, + ) -> Result, ReviewFailure> { + let other_comments = self + .github_client + .get_pull_request_comments(pull_request.number) + .await?; + + if !other_comments.is_empty() { + let mut org_approving_comments = Vec::new(); + + for comment in other_comments { + if pull_request + .user + .as_ref() + .is_some_and(|pr_author| pr_author.login != comment.user.login) + && comment.body.as_ref().is_some_and(|body| { + body.contains(ZED_ZIPPY_COMMENT_APPROVAL_PATTERN) + || body.contains(ZED_ZIPPY_GROUP_APPROVAL) + }) + && self + .github_client + .check_org_membership(&GithubLogin::new(comment.user.login.clone())) + .await? + { + org_approving_comments.push(comment); + } + } + + Ok(org_approving_comments + .is_empty() + .not() + .then_some(ReviewSuccess::ApprovingComment(org_approving_comments))) + } else { + Ok(None) + } + } + + pub async fn generate_report(mut self) -> anyhow::Result { + let mut report = Report::new(); + + let commits_to_check = std::mem::take(&mut self.commits); + let total_commits = commits_to_check.len(); + + for (i, commit) in commits_to_check.into_iter().enumerate() { + println!( + "Checking commit {:?} ({current}/{total})", + commit.sha().short(), + current = i + 1, + total = total_commits + ); + + let review_result = self.check_commit(&commit).await; + + if let Err(err) = &review_result { + println!("Commit {:?} failed review: {:?}", commit.sha().short(), err); + } + + report.add(commit, review_result); + } + + Ok(report) + } +} + +#[cfg(test)] +mod tests { + use std::rc::Rc; + use std::str::FromStr; + + use crate::git::{CommitDetails, CommitList, CommitSha}; + use crate::github::{ + AuthorsForCommits, GitHubApiClient, GitHubClient, GitHubUser, GithubLogin, + 
PullRequestComment, PullRequestData, PullRequestReview, ReviewState, + }; + + use super::{Reporter, ReviewFailure, ReviewSuccess}; + + struct MockGitHubApi { + pull_request: PullRequestData, + reviews: Vec, + comments: Vec, + commit_authors_json: serde_json::Value, + org_members: Vec, + } + + #[async_trait::async_trait(?Send)] + impl GitHubApiClient for MockGitHubApi { + async fn get_pull_request(&self, _pr_number: u64) -> anyhow::Result { + Ok(self.pull_request.clone()) + } + + async fn get_pull_request_reviews( + &self, + _pr_number: u64, + ) -> anyhow::Result> { + Ok(self.reviews.clone()) + } + + async fn get_pull_request_comments( + &self, + _pr_number: u64, + ) -> anyhow::Result> { + Ok(self.comments.clone()) + } + + async fn get_commit_authors( + &self, + _commit_shas: &[&CommitSha], + ) -> anyhow::Result { + serde_json::from_value(self.commit_authors_json.clone()).map_err(Into::into) + } + + async fn check_org_membership(&self, login: &GithubLogin) -> anyhow::Result { + Ok(self + .org_members + .iter() + .any(|member| member == login.as_str())) + } + + async fn ensure_pull_request_has_label( + &self, + _label: &str, + _pr_number: u64, + ) -> anyhow::Result<()> { + Ok(()) + } + } + + fn make_commit( + sha: &str, + author_name: &str, + author_email: &str, + title: &str, + body: &str, + ) -> CommitDetails { + let formatted = format!( + "{sha}|field-delimiter|{author_name}|field-delimiter|{author_email}|field-delimiter|\ + {title}|body-delimiter|{body}|commit-delimiter|" + ); + CommitList::from_str(&formatted) + .expect("test commit should parse") + .into_iter() + .next() + .expect("should have one commit") + } + + fn review(login: &str, state: ReviewState) -> PullRequestReview { + PullRequestReview { + user: Some(GitHubUser { + login: login.to_owned(), + }), + state: Some(state), + } + } + + fn comment(login: &str, body: &str) -> PullRequestComment { + PullRequestComment { + user: GitHubUser { + login: login.to_owned(), + }, + body: Some(body.to_owned()), + } + 
} + + struct TestScenario { + pull_request: PullRequestData, + reviews: Vec, + comments: Vec, + commit_authors_json: serde_json::Value, + org_members: Vec, + commit: CommitDetails, + } + + impl TestScenario { + fn single_commit() -> Self { + Self { + pull_request: PullRequestData { + number: 1234, + user: Some(GitHubUser { + login: "alice".to_owned(), + }), + merged_by: None, + }, + reviews: vec![], + comments: vec![], + commit_authors_json: serde_json::json!({}), + org_members: vec![], + commit: make_commit( + "abc12345abc12345", + "Alice", + "alice@test.com", + "Fix thing (#1234)", + "", + ), + } + } + + fn with_reviews(mut self, reviews: Vec) -> Self { + self.reviews = reviews; + self + } + + fn with_comments(mut self, comments: Vec) -> Self { + self.comments = comments; + self + } + + fn with_org_members(mut self, members: Vec<&str>) -> Self { + self.org_members = members.into_iter().map(str::to_owned).collect(); + self + } + + fn with_commit_authors_json(mut self, json: serde_json::Value) -> Self { + self.commit_authors_json = json; + self + } + + fn with_commit(mut self, commit: CommitDetails) -> Self { + self.commit = commit; + self + } + + async fn run_scenario(self) -> Result { + let mock = MockGitHubApi { + pull_request: self.pull_request, + reviews: self.reviews, + comments: self.comments, + commit_authors_json: self.commit_authors_json, + org_members: self.org_members, + }; + let client = GitHubClient::new(Rc::new(mock)); + let reporter = Reporter::new(CommitList::default(), &client); + reporter.check_commit(&self.commit).await + } + } + + #[tokio::test] + async fn approved_review_by_org_member_succeeds() { + let result = TestScenario::single_commit() + .with_reviews(vec![review("bob", ReviewState::Approved)]) + .with_org_members(vec!["bob"]) + .run_scenario() + .await; + assert!(matches!(result, Ok(ReviewSuccess::PullRequestReviewed(_)))); + } + + #[tokio::test] + async fn non_approved_review_state_is_not_accepted() { + let result = 
TestScenario::single_commit() + .with_reviews(vec![review("bob", ReviewState::Other)]) + .with_org_members(vec!["bob"]) + .run_scenario() + .await; + assert!(matches!(result, Err(ReviewFailure::Unreviewed))); + } + + #[tokio::test] + async fn review_by_non_org_member_is_not_accepted() { + let result = TestScenario::single_commit() + .with_reviews(vec![review("bob", ReviewState::Approved)]) + .run_scenario() + .await; + assert!(matches!(result, Err(ReviewFailure::Unreviewed))); + } + + #[tokio::test] + async fn pr_author_own_approval_review_is_rejected() { + let result = TestScenario::single_commit() + .with_reviews(vec![review("alice", ReviewState::Approved)]) + .with_org_members(vec!["alice"]) + .run_scenario() + .await; + assert!(matches!(result, Err(ReviewFailure::Unreviewed))); + } + + #[tokio::test] + async fn pr_author_own_approval_comment_is_rejected() { + let result = TestScenario::single_commit() + .with_comments(vec![comment("alice", "@zed-zippy approve")]) + .with_org_members(vec!["alice"]) + .run_scenario() + .await; + assert!(matches!(result, Err(ReviewFailure::Unreviewed))); + } + + #[tokio::test] + async fn approval_comment_by_org_member_succeeds() { + let result = TestScenario::single_commit() + .with_comments(vec![comment("bob", "@zed-zippy approve")]) + .with_org_members(vec!["bob"]) + .run_scenario() + .await; + assert!(matches!(result, Ok(ReviewSuccess::ApprovingComment(_)))); + } + + #[tokio::test] + async fn group_approval_comment_by_org_member_succeeds() { + let result = TestScenario::single_commit() + .with_comments(vec![comment("bob", "@zed-industries/approved")]) + .with_org_members(vec!["bob"]) + .run_scenario() + .await; + assert!(matches!(result, Ok(ReviewSuccess::ApprovingComment(_)))); + } + + #[tokio::test] + async fn comment_without_approval_pattern_is_not_accepted() { + let result = TestScenario::single_commit() + .with_comments(vec![comment("bob", "looks good")]) + .with_org_members(vec!["bob"]) + .run_scenario() + .await; + 
assert!(matches!(result, Err(ReviewFailure::Unreviewed))); + } + + #[tokio::test] + async fn commit_without_pr_number_is_no_pr_found() { + let result = TestScenario::single_commit() + .with_commit(make_commit( + "abc12345abc12345", + "Alice", + "alice@test.com", + "Fix thing without PR number", + "", + )) + .run_scenario() + .await; + assert!(matches!(result, Err(ReviewFailure::NoPullRequestFound))); + } + + #[tokio::test] + async fn pr_review_takes_precedence_over_comment() { + let result = TestScenario::single_commit() + .with_reviews(vec![review("bob", ReviewState::Approved)]) + .with_comments(vec![comment("charlie", "@zed-zippy approve")]) + .with_org_members(vec!["bob", "charlie"]) + .run_scenario() + .await; + assert!(matches!(result, Ok(ReviewSuccess::PullRequestReviewed(_)))); + } + + #[tokio::test] + async fn comment_takes_precedence_over_co_author() { + let result = TestScenario::single_commit() + .with_comments(vec![comment("bob", "@zed-zippy approve")]) + .with_commit_authors_json(serde_json::json!({ + "abc12345abc12345": { + "author": { + "name": "Alice", + "email": "alice@test.com", + "user": { "login": "alice" } + }, + "authors": [{ + "name": "Charlie", + "email": "charlie@test.com", + "user": { "login": "charlie" } + }] + } + })) + .with_commit(make_commit( + "abc12345abc12345", + "Alice", + "alice@test.com", + "Fix thing (#1234)", + "Co-authored-by: Charlie ", + )) + .with_org_members(vec!["bob", "charlie"]) + .run_scenario() + .await; + assert!(matches!(result, Ok(ReviewSuccess::ApprovingComment(_)))); + } + + #[tokio::test] + async fn co_author_org_member_succeeds() { + let result = TestScenario::single_commit() + .with_commit_authors_json(serde_json::json!({ + "abc12345abc12345": { + "author": { + "name": "Alice", + "email": "alice@test.com", + "user": { "login": "alice" } + }, + "authors": [{ + "name": "Bob", + "email": "bob@test.com", + "user": { "login": "bob" } + }] + } + })) + .with_commit(make_commit( + "abc12345abc12345", + "Alice", + 
"alice@test.com", + "Fix thing (#1234)", + "Co-authored-by: Bob ", + )) + .with_org_members(vec!["bob"]) + .run_scenario() + .await; + assert!(matches!(result, Ok(ReviewSuccess::CoAuthored(_)))); + } + + #[tokio::test] + async fn no_reviews_no_comments_no_coauthors_is_unreviewed() { + let result = TestScenario::single_commit().run_scenario().await; + assert!(matches!(result, Err(ReviewFailure::Unreviewed))); + } +} diff --git a/tooling/compliance/src/git.rs b/tooling/compliance/src/git.rs new file mode 100644 index 0000000000000000000000000000000000000000..fa2cb725712de82526d4ce717c2ec3dc97d22885 --- /dev/null +++ b/tooling/compliance/src/git.rs @@ -0,0 +1,591 @@ +#![allow(clippy::disallowed_methods, reason = "This is only used in xtasks")] +use std::{ + fmt::{self, Debug}, + ops::Not, + process::Command, + str::FromStr, + sync::LazyLock, +}; + +use anyhow::{Context, Result, anyhow}; +use derive_more::{Deref, DerefMut, FromStr}; + +use itertools::Itertools; +use regex::Regex; +use semver::Version; +use serde::Deserialize; + +pub trait Subcommand { + type ParsedOutput: FromStr; + + fn args(&self) -> impl IntoIterator; +} + +#[derive(Deref, DerefMut)] +pub struct GitCommand { + #[deref] + #[deref_mut] + subcommand: G, +} + +impl GitCommand { + #[must_use] + pub fn run(subcommand: G) -> Result { + Self { subcommand }.run_impl() + } + + fn run_impl(self) -> Result { + let command_output = Command::new("git") + .args(self.subcommand.args()) + .output() + .context("Failed to spawn command")?; + + if command_output.status.success() { + String::from_utf8(command_output.stdout) + .map_err(|_| anyhow!("Invalid UTF8")) + .and_then(|s| { + G::ParsedOutput::from_str(s.trim()) + .map_err(|e| anyhow!("Failed to parse from string: {e:?}")) + }) + } else { + anyhow::bail!( + "Command failed with exit code {}, stderr: {}", + command_output.status.code().unwrap_or_default(), + String::from_utf8(command_output.stderr).unwrap_or_default() + ) + } + } +} + +#[derive(Copy, Clone, Debug, 
PartialEq, Eq, PartialOrd, Ord)] +pub enum ReleaseChannel { + Stable, + Preview, +} + +impl ReleaseChannel { + pub(crate) fn tag_suffix(&self) -> &'static str { + match self { + ReleaseChannel::Stable => "", + ReleaseChannel::Preview => "-pre", + } + } +} + +#[derive(Debug, Clone)] +pub struct VersionTag(Version, ReleaseChannel); + +impl VersionTag { + pub fn parse(input: &str) -> Result { + // Being a bit more lenient for human inputs + let version = input.strip_prefix('v').unwrap_or(input); + + let (version_str, channel) = version + .strip_suffix("-pre") + .map_or((version, ReleaseChannel::Stable), |version_str| { + (version_str, ReleaseChannel::Preview) + }); + + Version::parse(version_str) + .map(|version| Self(version, channel)) + .map_err(|_| anyhow::anyhow!("Failed to parse version from tag!")) + } + + pub fn version(&self) -> &Version { + &self.0 + } +} + +impl ToString for VersionTag { + fn to_string(&self) -> String { + format!( + "v{version}{channel_suffix}", + version = self.0, + channel_suffix = self.1.tag_suffix() + ) + } +} + +#[derive(Debug, Deref, FromStr, PartialEq, Eq, Hash, Deserialize)] +pub struct CommitSha(pub(crate) String); + +impl CommitSha { + pub fn short(&self) -> &str { + self.0.as_str().split_at(8).0 + } +} + +#[derive(Debug)] +pub struct CommitDetails { + sha: CommitSha, + author: Committer, + title: String, + body: String, +} + +#[derive(Debug, Clone, PartialEq, Eq)] +pub struct Committer { + name: String, + email: String, +} + +impl Committer { + pub fn new(name: &str, email: &str) -> Self { + Self { + name: name.to_owned(), + email: email.to_owned(), + } + } +} + +impl fmt::Display for Committer { + fn fmt(&self, formatter: &mut fmt::Formatter<'_>) -> fmt::Result { + write!(formatter, "{} ({})", self.name, self.email) + } +} + +impl CommitDetails { + const BODY_DELIMITER: &str = "|body-delimiter|"; + const COMMIT_DELIMITER: &str = "|commit-delimiter|"; + const FIELD_DELIMITER: &str = "|field-delimiter|"; + const FORMAT_STRING: 
&str = "%H|field-delimiter|%an|field-delimiter|%ae|field-delimiter|%s|body-delimiter|%b|commit-delimiter|"; + + fn parse(line: &str, body: &str) -> Result { + let Some([sha, author_name, author_email, title]) = + line.splitn(4, Self::FIELD_DELIMITER).collect_array() + else { + return Err(anyhow!("Failed to parse commit fields from input {line}")); + }; + + Ok(CommitDetails { + sha: CommitSha(sha.to_owned()), + author: Committer::new(author_name, author_email), + title: title.to_owned(), + body: body.to_owned(), + }) + } + + pub fn pr_number(&self) -> Option { + // Since we use squash merge, all commit titles end with the '(#12345)' pattern. + // While we could strictly speaking index into this directly, go for a slightly + // less prone approach to errors + const PATTERN: &str = " (#"; + self.title + .rfind(PATTERN) + .and_then(|location| { + self.title[location..] + .find(')') + .map(|relative_end| location + PATTERN.len()..location + relative_end) + }) + .and_then(|range| self.title[range].parse().ok()) + } + + pub(crate) fn co_authors(&self) -> Option> { + static CO_AUTHOR_REGEX: LazyLock = + LazyLock::new(|| Regex::new(r"Co-authored-by: (.+) <(.+)>").unwrap()); + + let mut co_authors = Vec::new(); + + for cap in CO_AUTHOR_REGEX.captures_iter(&self.body.as_ref()) { + let Some((name, email)) = cap + .get(1) + .map(|m| m.as_str()) + .zip(cap.get(2).map(|m| m.as_str())) + else { + continue; + }; + co_authors.push(Committer::new(name, email)); + } + + co_authors.is_empty().not().then_some(co_authors) + } + + pub(crate) fn author(&self) -> &Committer { + &self.author + } + + pub(crate) fn title(&self) -> &str { + &self.title + } + + pub(crate) fn sha(&self) -> &CommitSha { + &self.sha + } +} + +#[derive(Debug, Deref, Default, DerefMut)] +pub struct CommitList(Vec); + +impl CommitList { + pub fn range(&self) -> Option { + self.0 + .first() + .zip(self.0.last()) + .map(|(first, last)| format!("{}..{}", first.sha().0, last.sha().0)) + } +} + +impl IntoIterator for 
CommitList { + type IntoIter = std::vec::IntoIter; + type Item = CommitDetails; + + fn into_iter(self) -> std::vec::IntoIter { + self.0.into_iter() + } +} + +impl FromStr for CommitList { + type Err = anyhow::Error; + + fn from_str(input: &str) -> Result { + Ok(CommitList( + input + .split(CommitDetails::COMMIT_DELIMITER) + .filter(|commit_details| !commit_details.is_empty()) + .map(|commit_details| { + let (line, body) = commit_details + .trim() + .split_once(CommitDetails::BODY_DELIMITER) + .expect("Missing body delimiter"); + CommitDetails::parse(line, body) + .expect("Parsing from the output should succeed") + }) + .collect(), + )) + } +} + +pub struct GetVersionTags; + +impl Subcommand for GetVersionTags { + type ParsedOutput = VersionTagList; + + fn args(&self) -> impl IntoIterator { + ["tag", "-l", "v*"].map(ToOwned::to_owned) + } +} + +pub struct VersionTagList(Vec); + +impl VersionTagList { + pub fn sorted(mut self) -> Self { + self.0.sort_by(|a, b| a.version().cmp(b.version())); + self + } + + pub fn find_previous_minor_version(&self, version_tag: &VersionTag) -> Option<&VersionTag> { + self.0 + .iter() + .take_while(|tag| tag.version() < version_tag.version()) + .collect_vec() + .into_iter() + .rev() + .find(|tag| { + (tag.version().major < version_tag.version().major + || (tag.version().major == version_tag.version().major + && tag.version().minor < version_tag.version().minor)) + && tag.version().patch == 0 + }) + } +} + +impl FromStr for VersionTagList { + type Err = anyhow::Error; + + fn from_str(s: &str) -> Result { + let version_tags = s.lines().flat_map(VersionTag::parse).collect_vec(); + + version_tags + .is_empty() + .not() + .then_some(Self(version_tags)) + .ok_or_else(|| anyhow::anyhow!("No version tags found")) + } +} + +pub struct CommitsFromVersionToHead { + version_tag: VersionTag, + branch: String, +} + +impl CommitsFromVersionToHead { + pub fn new(version_tag: VersionTag, branch: String) -> Self { + Self { + version_tag, + branch, + } + 
} +} + +impl Subcommand for CommitsFromVersionToHead { + type ParsedOutput = CommitList; + + fn args(&self) -> impl IntoIterator { + [ + "log".to_string(), + format!("--pretty=format:{}", CommitDetails::FORMAT_STRING), + format!( + "{version}..{branch}", + version = self.version_tag.to_string(), + branch = self.branch + ), + ] + } +} + +pub struct NoOutput; + +impl FromStr for NoOutput { + type Err = anyhow::Error; + + fn from_str(_: &str) -> Result { + Ok(NoOutput) + } +} + +#[cfg(test)] +mod tests { + use super::*; + use indoc::indoc; + + #[test] + fn parse_stable_version_tag() { + let tag = VersionTag::parse("v0.172.8").unwrap(); + assert_eq!(tag.version().major, 0); + assert_eq!(tag.version().minor, 172); + assert_eq!(tag.version().patch, 8); + assert_eq!(tag.1, ReleaseChannel::Stable); + } + + #[test] + fn parse_preview_version_tag() { + let tag = VersionTag::parse("v0.172.1-pre").unwrap(); + assert_eq!(tag.version().major, 0); + assert_eq!(tag.version().minor, 172); + assert_eq!(tag.version().patch, 1); + assert_eq!(tag.1, ReleaseChannel::Preview); + } + + #[test] + fn parse_version_tag_without_v_prefix() { + let tag = VersionTag::parse("0.172.8").unwrap(); + assert_eq!(tag.version().major, 0); + assert_eq!(tag.version().minor, 172); + assert_eq!(tag.version().patch, 8); + } + + #[test] + fn parse_invalid_version_tag() { + let result = VersionTag::parse("vConradTest"); + assert!(result.is_err()); + } + + #[test] + fn version_tag_stable_roundtrip() { + let tag = VersionTag::parse("v0.172.8").unwrap(); + assert_eq!(tag.to_string(), "v0.172.8"); + } + + #[test] + fn version_tag_preview_roundtrip() { + let tag = VersionTag::parse("v0.172.1-pre").unwrap(); + assert_eq!(tag.to_string(), "v0.172.1-pre"); + } + + #[test] + fn sorted_orders_by_semver() { + let input = indoc! 
{" + v0.172.8 + v0.170.1 + v0.171.4 + v0.170.2 + v0.172.11 + v0.171.3 + v0.172.9 + "}; + let list = VersionTagList::from_str(input).unwrap().sorted(); + for window in list.0.windows(2) { + assert!( + window[0].version() <= window[1].version(), + "{} should come before {}", + window[0].to_string(), + window[1].to_string() + ); + } + assert_eq!(list.0[0].to_string(), "v0.170.1"); + assert_eq!(list.0[list.0.len() - 1].to_string(), "v0.172.11"); + } + + #[test] + fn find_previous_minor_for_173_returns_172() { + let input = indoc! {" + v0.170.1 + v0.170.2 + v0.171.3 + v0.171.4 + v0.172.0 + v0.172.8 + v0.172.9 + v0.172.11 + "}; + let list = VersionTagList::from_str(input).unwrap().sorted(); + let target = VersionTag::parse("v0.173.0").unwrap(); + let previous = list.find_previous_minor_version(&target).unwrap(); + assert_eq!(previous.version().major, 0); + assert_eq!(previous.version().minor, 172); + assert_eq!(previous.version().patch, 0); + } + + #[test] + fn find_previous_minor_skips_same_minor() { + let input = indoc! {" + v0.172.8 + v0.172.9 + v0.172.11 + "}; + let list = VersionTagList::from_str(input).unwrap().sorted(); + let target = VersionTag::parse("v0.172.8").unwrap(); + assert!(list.find_previous_minor_version(&target).is_none()); + } + + #[test] + fn find_previous_minor_with_major_version_gap() { + let input = indoc! {" + v0.172.0 + v0.172.9 + v0.172.11 + "}; + let list = VersionTagList::from_str(input).unwrap().sorted(); + let target = VersionTag::parse("v1.0.0").unwrap(); + let previous = list.find_previous_minor_version(&target).unwrap(); + assert_eq!(previous.to_string(), "v0.172.0"); + } + + #[test] + fn find_previous_minor_requires_zero_patch_version() { + let input = indoc! 
{" + v0.172.1 + v0.172.9 + v0.172.11 + "}; + let list = VersionTagList::from_str(input).unwrap().sorted(); + let target = VersionTag::parse("v1.0.0").unwrap(); + assert!(list.find_previous_minor_version(&target).is_none()); + } + + #[test] + fn parse_tag_list_from_real_tags() { + let input = indoc! {" + v0.9999-temporary + vConradTest + v0.172.8 + "}; + let list = VersionTagList::from_str(input).unwrap(); + assert_eq!(list.0.len(), 1); + assert_eq!(list.0[0].to_string(), "v0.172.8"); + } + + #[test] + fn parse_empty_tag_list_fails() { + let result = VersionTagList::from_str(""); + assert!(result.is_err()); + } + + #[test] + fn pr_number_from_squash_merge_title() { + let line = format!( + "abc123{d}Author Name{d}author@email.com{d}Add cool feature (#12345)", + d = CommitDetails::FIELD_DELIMITER + ); + let commit = CommitDetails::parse(&line, "").unwrap(); + assert_eq!(commit.pr_number(), Some(12345)); + } + + #[test] + fn pr_number_missing() { + let line = format!( + "abc123{d}Author Name{d}author@email.com{d}Some commit without PR ref", + d = CommitDetails::FIELD_DELIMITER + ); + let commit = CommitDetails::parse(&line, "").unwrap(); + assert_eq!(commit.pr_number(), None); + } + + #[test] + fn pr_number_takes_last_match() { + let line = format!( + "abc123{d}Author Name{d}author@email.com{d}Fix (#123) and refactor (#456)", + d = CommitDetails::FIELD_DELIMITER + ); + let commit = CommitDetails::parse(&line, "").unwrap(); + assert_eq!(commit.pr_number(), Some(456)); + } + + #[test] + fn co_authors_parsed_from_body() { + let line = format!( + "abc123{d}Author Name{d}author@email.com{d}Some title", + d = CommitDetails::FIELD_DELIMITER + ); + let body = indoc! 
{" + Co-authored-by: Alice Smith + Co-authored-by: Bob Jones + "}; + let commit = CommitDetails::parse(&line, body).unwrap(); + let co_authors = commit.co_authors().unwrap(); + assert_eq!(co_authors.len(), 2); + assert_eq!( + co_authors[0], + Committer::new("Alice Smith", "alice@example.com") + ); + assert_eq!( + co_authors[1], + Committer::new("Bob Jones", "bob@example.com") + ); + } + + #[test] + fn no_co_authors_returns_none() { + let line = format!( + "abc123{d}Author Name{d}author@email.com{d}Some title", + d = CommitDetails::FIELD_DELIMITER + ); + let commit = CommitDetails::parse(&line, "").unwrap(); + assert!(commit.co_authors().is_none()); + } + + #[test] + fn commit_sha_short_returns_first_8_chars() { + let sha = CommitSha("abcdef1234567890abcdef1234567890abcdef12".into()); + assert_eq!(sha.short(), "abcdef12"); + } + + #[test] + fn parse_commit_list_from_git_log_format() { + let fd = CommitDetails::FIELD_DELIMITER; + let bd = CommitDetails::BODY_DELIMITER; + let cd = CommitDetails::COMMIT_DELIMITER; + + let input = format!( + "sha111{fd}Alice{fd}alice@test.com{fd}First commit (#100){bd}First body{cd}sha222{fd}Bob{fd}bob@test.com{fd}Second commit (#200){bd}Second body{cd}" + ); + + let list = CommitList::from_str(&input).unwrap(); + assert_eq!(list.0.len(), 2); + + assert_eq!(list.0[0].sha().0, "sha111"); + assert_eq!( + list.0[0].author(), + &Committer::new("Alice", "alice@test.com") + ); + assert_eq!(list.0[0].title(), "First commit (#100)"); + assert_eq!(list.0[0].pr_number(), Some(100)); + assert_eq!(list.0[0].body, "First body"); + + assert_eq!(list.0[1].sha().0, "sha222"); + assert_eq!(list.0[1].author(), &Committer::new("Bob", "bob@test.com")); + assert_eq!(list.0[1].title(), "Second commit (#200)"); + assert_eq!(list.0[1].pr_number(), Some(200)); + assert_eq!(list.0[1].body, "Second body"); + } +} diff --git a/tooling/compliance/src/github.rs b/tooling/compliance/src/github.rs new file mode 100644 index 
0000000000000000000000000000000000000000..ebd2f2c75f5d0083632a8f70e3ea9dd2680d4eb5 --- /dev/null +++ b/tooling/compliance/src/github.rs @@ -0,0 +1,424 @@ +use std::{collections::HashMap, fmt, ops::Not, rc::Rc}; + +use anyhow::Result; +use derive_more::Deref; +use serde::Deserialize; + +use crate::git::CommitSha; + +pub const PR_REVIEW_LABEL: &str = "PR state:needs review"; + +#[derive(Debug, Clone)] +pub struct GitHubUser { + pub login: String, +} + +#[derive(Debug, Clone)] +pub struct PullRequestData { + pub number: u64, + pub user: Option, + pub merged_by: Option, +} + +#[derive(Debug, Clone, Copy, PartialEq, Eq)] +pub enum ReviewState { + Approved, + Other, +} + +#[derive(Debug, Clone)] +pub struct PullRequestReview { + pub user: Option, + pub state: Option, +} + +#[derive(Debug, Clone)] +pub struct PullRequestComment { + pub user: GitHubUser, + pub body: Option, +} + +#[derive(Debug, Deserialize, Clone, Deref, PartialEq, Eq)] +pub struct GithubLogin { + login: String, +} + +impl GithubLogin { + pub(crate) fn new(login: String) -> Self { + Self { login } + } +} + +impl fmt::Display for GithubLogin { + fn fmt(&self, formatter: &mut fmt::Formatter<'_>) -> fmt::Result { + write!(formatter, "@{}", self.login) + } +} + +#[derive(Debug, Deserialize, Clone)] +pub struct CommitAuthor { + name: String, + email: String, + user: Option, +} + +impl CommitAuthor { + pub(crate) fn user(&self) -> Option<&GithubLogin> { + self.user.as_ref() + } +} + +impl PartialEq for CommitAuthor { + fn eq(&self, other: &Self) -> bool { + self.user.as_ref().zip(other.user.as_ref()).map_or_else( + || self.email == other.email || self.name == other.name, + |(l, r)| l == r, + ) + } +} + +impl fmt::Display for CommitAuthor { + fn fmt(&self, formatter: &mut fmt::Formatter<'_>) -> fmt::Result { + match self.user.as_ref() { + Some(user) => write!(formatter, "{} ({user})", self.name), + None => write!(formatter, "{} ({})", self.name, self.email), + } + } +} + +#[derive(Debug, Deserialize)] +pub 
struct CommitAuthors { + #[serde(rename = "author")] + primary_author: CommitAuthor, + #[serde(rename = "authors")] + co_authors: Vec, +} + +impl CommitAuthors { + pub fn co_authors(&self) -> Option> { + self.co_authors.is_empty().not().then(|| { + self.co_authors + .iter() + .filter(|co_author| *co_author != &self.primary_author) + }) + } +} + +#[derive(Debug, Deserialize, Deref)] +pub struct AuthorsForCommits(HashMap); + +#[async_trait::async_trait(?Send)] +pub trait GitHubApiClient { + async fn get_pull_request(&self, pr_number: u64) -> Result; + async fn get_pull_request_reviews(&self, pr_number: u64) -> Result>; + async fn get_pull_request_comments(&self, pr_number: u64) -> Result>; + async fn get_commit_authors(&self, commit_shas: &[&CommitSha]) -> Result; + async fn check_org_membership(&self, login: &GithubLogin) -> Result; + async fn ensure_pull_request_has_label(&self, label: &str, pr_number: u64) -> Result<()>; +} + +pub struct GitHubClient { + api: Rc, +} + +impl GitHubClient { + pub fn new(api: Rc) -> Self { + Self { api } + } + + #[cfg(feature = "octo-client")] + pub async fn for_app(app_id: u64, app_private_key: &str) -> Result { + let client = OctocrabClient::new(app_id, app_private_key).await?; + Ok(Self::new(Rc::new(client))) + } + + pub async fn get_pull_request(&self, pr_number: u64) -> Result { + self.api.get_pull_request(pr_number).await + } + + pub async fn get_pull_request_reviews(&self, pr_number: u64) -> Result> { + self.api.get_pull_request_reviews(pr_number).await + } + + pub async fn get_pull_request_comments( + &self, + pr_number: u64, + ) -> Result> { + self.api.get_pull_request_comments(pr_number).await + } + + pub async fn get_commit_authors<'a>( + &self, + commit_shas: impl IntoIterator, + ) -> Result { + let shas: Vec<&CommitSha> = commit_shas.into_iter().collect(); + self.api.get_commit_authors(&shas).await + } + + pub async fn check_org_membership(&self, login: &GithubLogin) -> Result { + 
self.api.check_org_membership(login).await + } + + pub async fn add_label_to_pull_request(&self, label: &str, pr_number: u64) -> Result<()> { + self.api + .ensure_pull_request_has_label(label, pr_number) + .await + } +} + +#[cfg(feature = "octo-client")] +mod octo_client { + use anyhow::{Context, Result}; + use futures::TryStreamExt as _; + use itertools::Itertools; + use jsonwebtoken::EncodingKey; + use octocrab::{ + Octocrab, Page, models::pulls::ReviewState as OctocrabReviewState, + service::middleware::cache::mem::InMemoryCache, + }; + use serde::de::DeserializeOwned; + use tokio::pin; + + use crate::git::CommitSha; + + use super::{ + AuthorsForCommits, GitHubApiClient, GitHubUser, GithubLogin, PullRequestComment, + PullRequestData, PullRequestReview, ReviewState, + }; + + const PAGE_SIZE: u8 = 100; + const ORG: &str = "zed-industries"; + const REPO: &str = "zed"; + + pub struct OctocrabClient { + client: Octocrab, + } + + impl OctocrabClient { + pub async fn new(app_id: u64, app_private_key: &str) -> Result { + let octocrab = Octocrab::builder() + .cache(InMemoryCache::new()) + .app( + app_id.into(), + EncodingKey::from_rsa_pem(app_private_key.as_bytes())?, + ) + .build()?; + + let installations = octocrab + .apps() + .installations() + .send() + .await + .context("Failed to fetch installations")? + .take_items(); + + let installation_id = installations + .into_iter() + .find(|installation| installation.account.login == ORG) + .context("Could not find Zed repository in installations")? + .id; + + let client = octocrab.installation(installation_id)?; + Ok(Self { client }) + } + + fn build_co_authors_query<'a>(shas: impl IntoIterator) -> String { + const FRAGMENT: &str = r#" + ... 
on Commit { + author { + name + email + user { login } + } + authors(first: 10) { + nodes { + name + email + user { login } + } + } + } + "#; + + let objects: String = shas + .into_iter() + .map(|commit_sha| { + format!( + "commit{sha}: object(oid: \"{sha}\") {{ {FRAGMENT} }}", + sha = **commit_sha + ) + }) + .join("\n"); + + format!("{{ repository(owner: \"{ORG}\", name: \"{REPO}\") {{ {objects} }} }}") + .replace("\n", "") + } + + async fn graphql( + &self, + query: &serde_json::Value, + ) -> octocrab::Result { + self.client.graphql(query).await + } + + async fn get_all( + &self, + page: Page, + ) -> octocrab::Result> { + self.get_filtered(page, |_| true).await + } + + async fn get_filtered( + &self, + page: Page, + predicate: impl Fn(&T) -> bool, + ) -> octocrab::Result> { + let stream = page.into_stream(&self.client); + pin!(stream); + + let mut results = Vec::new(); + + while let Some(item) = stream.try_next().await? + && predicate(&item) + { + results.push(item); + } + + Ok(results) + } + } + + #[async_trait::async_trait(?Send)] + impl GitHubApiClient for OctocrabClient { + async fn get_pull_request(&self, pr_number: u64) -> Result { + let pr = self.client.pulls(ORG, REPO).get(pr_number).await?; + Ok(PullRequestData { + number: pr.number, + user: pr.user.map(|user| GitHubUser { login: user.login }), + merged_by: pr.merged_by.map(|user| GitHubUser { login: user.login }), + }) + } + + async fn get_pull_request_reviews(&self, pr_number: u64) -> Result> { + let page = self + .client + .pulls(ORG, REPO) + .list_reviews(pr_number) + .per_page(PAGE_SIZE) + .send() + .await?; + + let reviews = self.get_all(page).await?; + + Ok(reviews + .into_iter() + .map(|review| PullRequestReview { + user: review.user.map(|user| GitHubUser { login: user.login }), + state: review.state.map(|state| match state { + OctocrabReviewState::Approved => ReviewState::Approved, + _ => ReviewState::Other, + }), + }) + .collect()) + } + + async fn get_pull_request_comments( + &self, + 
pr_number: u64, + ) -> Result> { + let page = self + .client + .issues(ORG, REPO) + .list_comments(pr_number) + .per_page(PAGE_SIZE) + .send() + .await?; + + let comments = self.get_all(page).await?; + + Ok(comments + .into_iter() + .map(|comment| PullRequestComment { + user: GitHubUser { + login: comment.user.login, + }, + body: comment.body, + }) + .collect()) + } + + async fn get_commit_authors( + &self, + commit_shas: &[&CommitSha], + ) -> Result { + let query = Self::build_co_authors_query(commit_shas.iter().copied()); + let query = serde_json::json!({ "query": query }); + let mut response = self.graphql::(&query).await?; + + response + .get_mut("data") + .and_then(|data| data.get_mut("repository")) + .and_then(|repo| repo.as_object_mut()) + .ok_or_else(|| anyhow::anyhow!("Unexpected response format!")) + .and_then(|commit_data| { + let mut response_map = serde_json::Map::with_capacity(commit_data.len()); + + for (key, value) in commit_data.iter_mut() { + let key_without_prefix = key.strip_prefix("commit").unwrap_or(key); + if let Some(authors) = value.get_mut("authors") { + if let Some(nodes) = authors.get("nodes") { + *authors = nodes.clone(); + } + } + + response_map.insert(key_without_prefix.to_owned(), value.clone()); + } + + serde_json::from_value(serde_json::Value::Object(response_map)) + .context("Failed to deserialize commit authors") + }) + } + + async fn check_org_membership(&self, login: &GithubLogin) -> Result { + let page = self + .client + .orgs(ORG) + .list_members() + .per_page(PAGE_SIZE) + .send() + .await?; + + let members = self.get_all(page).await?; + + Ok(members + .into_iter() + .any(|member| member.login == login.as_str())) + } + + async fn ensure_pull_request_has_label(&self, label: &str, pr_number: u64) -> Result<()> { + if self + .get_filtered( + self.client + .issues(ORG, REPO) + .list_labels_for_issue(pr_number) + .per_page(PAGE_SIZE) + .send() + .await?, + |pr_label| pr_label.name == label, + ) + .await + .is_ok_and(|l| 
l.is_empty()) + { + self.client + .issues(ORG, REPO) + .add_labels(pr_number, &[label.to_owned()]) + .await?; + } + + Ok(()) + } + } +} + +#[cfg(feature = "octo-client")] +pub use octo_client::OctocrabClient; diff --git a/tooling/compliance/src/lib.rs b/tooling/compliance/src/lib.rs new file mode 100644 index 0000000000000000000000000000000000000000..9476412c6d6d1f56b1396bf5d700924549c707da --- /dev/null +++ b/tooling/compliance/src/lib.rs @@ -0,0 +1,4 @@ +pub mod checks; +pub mod git; +pub mod github; +pub mod report; diff --git a/tooling/compliance/src/report.rs b/tooling/compliance/src/report.rs new file mode 100644 index 0000000000000000000000000000000000000000..16df145394726b97382884fbdfdc3164c0029786 --- /dev/null +++ b/tooling/compliance/src/report.rs @@ -0,0 +1,446 @@ +use std::{ + fs::{self, File}, + io::{BufWriter, Write}, + path::Path, +}; + +use anyhow::Context as _; +use derive_more::Display; +use itertools::{Either, Itertools}; + +use crate::{ + checks::{ReviewFailure, ReviewResult, ReviewSuccess}, + git::CommitDetails, +}; + +const PULL_REQUEST_BASE_URL: &str = "https://github.com/zed-industries/zed/pull"; + +#[derive(Debug)] +pub struct ReportEntry { + pub commit: CommitDetails, + reason: R, +} + +impl ReportEntry { + fn commit_cell(&self) -> String { + let title = escape_markdown_link_text(self.commit.title()); + + match self.commit.pr_number() { + Some(pr_number) => format!("[{title}]({PULL_REQUEST_BASE_URL}/{pr_number})"), + None => escape_markdown_table_text(self.commit.title()), + } + } + + fn pull_request_cell(&self) -> String { + self.commit + .pr_number() + .map(|pr_number| format!("#{pr_number}")) + .unwrap_or_else(|| "—".to_owned()) + } + + fn author_cell(&self) -> String { + escape_markdown_table_text(&self.commit.author().to_string()) + } + + fn reason_cell(&self) -> String { + escape_markdown_table_text(&self.reason.to_string()) + } +} + +impl ReportEntry { + fn issue_kind(&self) -> IssueKind { + match self.reason { + 
ReviewFailure::Other(_) => IssueKind::Error, + _ => IssueKind::NotReviewed, + } + } +} + +impl ReportEntry { + fn reviewers_cell(&self) -> String { + match &self.reason.reviewers() { + Ok(reviewers) => escape_markdown_table_text(&reviewers), + Err(_) => "—".to_owned(), + } + } +} + +#[derive(Debug, Default)] +pub struct ReportSummary { + pub pull_requests: usize, + pub reviewed: usize, + pub not_reviewed: usize, + pub errors: usize, +} + +pub enum ReportReviewSummary { + MissingReviews, + MissingReviewsWithErrors, + NoIssuesFound, +} + +impl ReportSummary { + fn from_entries(entries: &[ReportEntry]) -> Self { + Self { + pull_requests: entries + .iter() + .filter_map(|entry| entry.commit.pr_number()) + .unique() + .count(), + reviewed: entries.iter().filter(|entry| entry.reason.is_ok()).count(), + not_reviewed: entries + .iter() + .filter(|entry| { + matches!( + entry.reason, + Err(ReviewFailure::NoPullRequestFound | ReviewFailure::Unreviewed) + ) + }) + .count(), + errors: entries + .iter() + .filter(|entry| matches!(entry.reason, Err(ReviewFailure::Other(_)))) + .count(), + } + } + + pub fn review_summary(&self) -> ReportReviewSummary { + match self.not_reviewed { + 0 if self.errors == 0 => ReportReviewSummary::NoIssuesFound, + 1.. 
if self.errors == 0 => ReportReviewSummary::MissingReviews, + _ => ReportReviewSummary::MissingReviewsWithErrors, + } + } + + fn has_errors(&self) -> bool { + self.errors > 0 + } +} + +#[derive(Clone, Copy, Debug, Display, PartialEq, Eq, PartialOrd, Ord)] +enum IssueKind { + #[display("Error")] + Error, + #[display("Not reviewed")] + NotReviewed, +} + +#[derive(Debug, Default)] +pub struct Report { + entries: Vec>, +} + +impl Report { + pub fn new() -> Self { + Self::default() + } + + pub fn add(&mut self, commit: CommitDetails, result: ReviewResult) { + self.entries.push(ReportEntry { + commit, + reason: result, + }); + } + + pub fn errors(&self) -> impl Iterator> { + self.entries.iter().filter(|entry| entry.reason.is_err()) + } + + pub fn summary(&self) -> ReportSummary { + ReportSummary::from_entries(&self.entries) + } + + pub fn write_markdown(self, path: impl AsRef) -> anyhow::Result<()> { + let path = path.as_ref(); + + if let Some(parent) = path + .parent() + .filter(|parent| !parent.as_os_str().is_empty()) + { + fs::create_dir_all(parent).with_context(|| { + format!( + "Failed to create parent directory for markdown report at {}", + path.display() + ) + })?; + } + + let summary = self.summary(); + let (successes, mut issues): (Vec<_>, Vec<_>) = + self.entries + .into_iter() + .partition_map(|entry| match entry.reason { + Ok(success) => Either::Left(ReportEntry { + reason: success, + commit: entry.commit, + }), + Err(fail) => Either::Right(ReportEntry { + reason: fail, + commit: entry.commit, + }), + }); + + issues.sort_by_key(|entry| entry.issue_kind()); + + let file = File::create(path) + .with_context(|| format!("Failed to create markdown report at {}", path.display()))?; + let mut writer = BufWriter::new(file); + + writeln!(writer, "# Compliance report")?; + writeln!(writer)?; + writeln!(writer, "## Overview")?; + writeln!(writer)?; + writeln!(writer, "- PRs: {}", summary.pull_requests)?; + writeln!(writer, "- Reviewed: {}", summary.reviewed)?; + 
writeln!(writer, "- Not reviewed: {}", summary.not_reviewed)?; + if summary.has_errors() { + writeln!(writer, "- Errors: {}", summary.errors)?; + } + writeln!(writer)?; + + write_issue_table(&mut writer, &issues, &summary)?; + write_success_table(&mut writer, &successes)?; + + writer + .flush() + .with_context(|| format!("Failed to flush markdown report to {}", path.display())) + } +} + +fn write_issue_table( + writer: &mut impl Write, + issues: &[ReportEntry], + summary: &ReportSummary, +) -> std::io::Result<()> { + if summary.has_errors() { + writeln!(writer, "## Errors and unreviewed commits")?; + } else { + writeln!(writer, "## Unreviewed commits")?; + } + writeln!(writer)?; + + if issues.is_empty() { + if summary.has_errors() { + writeln!(writer, "No errors or unreviewed commits found.")?; + } else { + writeln!(writer, "No unreviewed commits found.")?; + } + writeln!(writer)?; + return Ok(()); + } + + writeln!(writer, "| Commit | PR | Author | Outcome | Reason |")?; + writeln!(writer, "| --- | --- | --- | --- | --- |")?; + + for entry in issues { + let issue_kind = entry.issue_kind(); + writeln!( + writer, + "| {} | {} | {} | {} | {} |", + entry.commit_cell(), + entry.pull_request_cell(), + entry.author_cell(), + issue_kind, + entry.reason_cell(), + )?; + } + + writeln!(writer)?; + Ok(()) +} + +fn write_success_table( + writer: &mut impl Write, + successful_entries: &[ReportEntry], +) -> std::io::Result<()> { + writeln!(writer, "## Successful commits")?; + writeln!(writer)?; + + if successful_entries.is_empty() { + writeln!(writer, "No successful commits found.")?; + writeln!(writer)?; + return Ok(()); + } + + writeln!(writer, "| Commit | PR | Author | Reviewers | Reason |")?; + writeln!(writer, "| --- | --- | --- | --- | --- |")?; + + for entry in successful_entries { + writeln!( + writer, + "| {} | {} | {} | {} | {} |", + entry.commit_cell(), + entry.pull_request_cell(), + entry.author_cell(), + entry.reviewers_cell(), + entry.reason_cell(), + )?; + } + + 
writeln!(writer)?; + Ok(()) +} + +fn escape_markdown_link_text(input: &str) -> String { + escape_markdown_table_text(input) + .replace('[', r"\[") + .replace(']', r"\]") +} + +fn escape_markdown_table_text(input: &str) -> String { + input + .replace('\\', r"\\") + .replace('|', r"\|") + .replace('\r', "") + .replace('\n', "
") +} + +#[cfg(test)] +mod tests { + use std::str::FromStr; + + use crate::{ + checks::{ReviewFailure, ReviewSuccess}, + git::{CommitDetails, CommitList}, + github::{GitHubUser, PullRequestReview, ReviewState}, + }; + + use super::{Report, ReportReviewSummary}; + + fn make_commit( + sha: &str, + author_name: &str, + author_email: &str, + title: &str, + body: &str, + ) -> CommitDetails { + let formatted = format!( + "{sha}|field-delimiter|{author_name}|field-delimiter|{author_email}|field-delimiter|{title}|body-delimiter|{body}|commit-delimiter|" + ); + CommitList::from_str(&formatted) + .expect("test commit should parse") + .into_iter() + .next() + .expect("should have one commit") + } + + fn reviewed() -> ReviewSuccess { + ReviewSuccess::PullRequestReviewed(vec![PullRequestReview { + user: Some(GitHubUser { + login: "reviewer".to_owned(), + }), + state: Some(ReviewState::Approved), + }]) + } + + #[test] + fn report_summary_counts_are_accurate() { + let mut report = Report::new(); + + report.add( + make_commit( + "aaa", + "Alice", + "alice@test.com", + "Reviewed commit (#100)", + "", + ), + Ok(reviewed()), + ); + report.add( + make_commit("bbb", "Bob", "bob@test.com", "Unreviewed commit (#200)", ""), + Err(ReviewFailure::Unreviewed), + ); + report.add( + make_commit("ccc", "Carol", "carol@test.com", "No PR commit", ""), + Err(ReviewFailure::NoPullRequestFound), + ); + report.add( + make_commit("ddd", "Dave", "dave@test.com", "Error commit (#300)", ""), + Err(ReviewFailure::Other(anyhow::anyhow!("some error"))), + ); + + let summary = report.summary(); + assert_eq!(summary.pull_requests, 3); + assert_eq!(summary.reviewed, 1); + assert_eq!(summary.not_reviewed, 2); + assert_eq!(summary.errors, 1); + } + + #[test] + fn report_summary_all_reviewed_is_no_issues() { + let mut report = Report::new(); + + report.add( + make_commit("aaa", "Alice", "alice@test.com", "First (#100)", ""), + Ok(reviewed()), + ); + report.add( + make_commit("bbb", "Bob", "bob@test.com", "Second 
(#200)", ""), + Ok(reviewed()), + ); + + let summary = report.summary(); + assert!(matches!( + summary.review_summary(), + ReportReviewSummary::NoIssuesFound + )); + } + + #[test] + fn report_summary_missing_reviews_only() { + let mut report = Report::new(); + + report.add( + make_commit("aaa", "Alice", "alice@test.com", "Reviewed (#100)", ""), + Ok(reviewed()), + ); + report.add( + make_commit("bbb", "Bob", "bob@test.com", "Unreviewed (#200)", ""), + Err(ReviewFailure::Unreviewed), + ); + + let summary = report.summary(); + assert!(matches!( + summary.review_summary(), + ReportReviewSummary::MissingReviews + )); + } + + #[test] + fn report_summary_errors_and_missing_reviews() { + let mut report = Report::new(); + + report.add( + make_commit("aaa", "Alice", "alice@test.com", "Unreviewed (#100)", ""), + Err(ReviewFailure::Unreviewed), + ); + report.add( + make_commit("bbb", "Bob", "bob@test.com", "Errored (#200)", ""), + Err(ReviewFailure::Other(anyhow::anyhow!("check failed"))), + ); + + let summary = report.summary(); + assert!(matches!( + summary.review_summary(), + ReportReviewSummary::MissingReviewsWithErrors + )); + } + + #[test] + fn report_summary_deduplicates_pull_requests() { + let mut report = Report::new(); + + report.add( + make_commit("aaa", "Alice", "alice@test.com", "First change (#100)", ""), + Ok(reviewed()), + ); + report.add( + make_commit("bbb", "Bob", "bob@test.com", "Second change (#100)", ""), + Ok(reviewed()), + ); + + let summary = report.summary(); + assert_eq!(summary.pull_requests, 1); + } +} diff --git a/tooling/xtask/Cargo.toml b/tooling/xtask/Cargo.toml index 21090d1304ea0eab9ad70808b91f76789f2fd923..f9628dfa6390872210df9f3cc00b367d9420f522 100644 --- a/tooling/xtask/Cargo.toml +++ b/tooling/xtask/Cargo.toml @@ -15,7 +15,8 @@ backtrace.workspace = true cargo_metadata.workspace = true cargo_toml.workspace = true clap = { workspace = true, features = ["derive"] } -toml.workspace = true +compliance = { workspace = true, features = 
["octo-client"] } +gh-workflow.workspace = true indoc.workspace = true indexmap.workspace = true itertools.workspace = true @@ -24,5 +25,6 @@ serde.workspace = true serde_json.workspace = true serde_yaml = "0.9.34" strum.workspace = true +tokio = { workspace = true, features = ["rt", "rt-multi-thread"] } +toml.workspace = true toml_edit.workspace = true -gh-workflow.workspace = true diff --git a/tooling/xtask/src/main.rs b/tooling/xtask/src/main.rs index 05afe3c766829137a7c2ba6e73d57638624d5e6a..c442f1c509e28172b7283c95e518eee743b7730c 100644 --- a/tooling/xtask/src/main.rs +++ b/tooling/xtask/src/main.rs @@ -15,6 +15,7 @@ struct Args { enum CliCommand { /// Runs `cargo clippy`. Clippy(tasks::clippy::ClippyArgs), + Compliance(tasks::compliance::ComplianceArgs), Licenses(tasks::licenses::LicensesArgs), /// Checks that packages conform to a set of standards. PackageConformity(tasks::package_conformity::PackageConformityArgs), @@ -31,6 +32,7 @@ fn main() -> Result<()> { match args.command { CliCommand::Clippy(args) => tasks::clippy::run_clippy(args), + CliCommand::Compliance(args) => tasks::compliance::check_compliance(args), CliCommand::Licenses(args) => tasks::licenses::run_licenses(args), CliCommand::PackageConformity(args) => { tasks::package_conformity::run_package_conformity(args) diff --git a/tooling/xtask/src/tasks.rs b/tooling/xtask/src/tasks.rs index 80f504fa0345de0d5bc71c5b44c71846f04c50bc..ea67d0abc5fcbd8e85f40251a7997bc6fbbbca1f 100644 --- a/tooling/xtask/src/tasks.rs +++ b/tooling/xtask/src/tasks.rs @@ -1,4 +1,5 @@ pub mod clippy; +pub mod compliance; pub mod licenses; pub mod package_conformity; pub mod publish_gpui; diff --git a/tooling/xtask/src/tasks/compliance.rs b/tooling/xtask/src/tasks/compliance.rs new file mode 100644 index 0000000000000000000000000000000000000000..78cc32b23f3160ae950aaa5e374071dd107ec350 --- /dev/null +++ b/tooling/xtask/src/tasks/compliance.rs @@ -0,0 +1,135 @@ +use std::path::PathBuf; + +use anyhow::{Context, Result}; +use 
clap::Parser; + +use compliance::{ + checks::Reporter, + git::{CommitsFromVersionToHead, GetVersionTags, GitCommand, VersionTag}, + github::GitHubClient, + report::ReportReviewSummary, +}; + +#[derive(Parser)] +pub struct ComplianceArgs { + #[arg(value_parser = VersionTag::parse)] + // The version to be on the lookout for + pub(crate) version_tag: VersionTag, + #[arg(long)] + // The markdown file to write the compliance report to + report_path: PathBuf, + #[arg(long)] + // An optional branch to use instead of the determined version branch + branch: Option, +} + +impl ComplianceArgs { + pub(crate) fn version_tag(&self) -> &VersionTag { + &self.version_tag + } + + fn version_branch(&self) -> String { + self.branch.clone().unwrap_or_else(|| { + format!( + "v{major}.{minor}.x", + major = self.version_tag().version().major, + minor = self.version_tag().version().minor + ) + }) + } +} + +async fn check_compliance_impl(args: ComplianceArgs) -> Result<()> { + let app_id = std::env::var("GITHUB_APP_ID").context("Missing GITHUB_APP_ID")?; + let key = std::env::var("GITHUB_APP_KEY").context("Missing GITHUB_APP_KEY")?; + + let tag = args.version_tag(); + + let previous_version = GitCommand::run(GetVersionTags)? 
+ .sorted() + .find_previous_minor_version(&tag) + .cloned() + .ok_or_else(|| { + anyhow::anyhow!( + "Could not find previous version for tag {tag}", + tag = tag.to_string() + ) + })?; + + println!( + "Checking compliance for version {} with version {} as base", + tag.version(), + previous_version.version() + ); + + let commits = GitCommand::run(CommitsFromVersionToHead::new( + previous_version, + args.version_branch(), + ))?; + + let Some(range) = commits.range() else { + anyhow::bail!("No commits found to check"); + }; + + println!("Checking commit range {range}, {} total", commits.len()); + + let client = GitHubClient::for_app( + app_id.parse().context("Failed to parse app ID as int")?, + key.as_ref(), + ) + .await?; + + println!("Initialized GitHub client for app ID {app_id}"); + + let report = Reporter::new(commits, &client).generate_report().await?; + + println!( + "Generated report for version {}", + args.version_tag().to_string() + ); + + let summary = report.summary(); + + println!( + "Applying compliance labels to {} pull requests", + summary.pull_requests + ); + + for report in report.errors() { + if let Some(pr_number) = report.commit.pr_number() { + println!("Adding review label to PR {}...", pr_number); + + client + .add_label_to_pull_request(compliance::github::PR_REVIEW_LABEL, pr_number) + .await?; + } + } + + let report_path = args.report_path.with_extension("md"); + + report.write_markdown(&report_path)?; + + println!("Wrote compliance report to {}", report_path.display()); + + match summary.review_summary() { + ReportReviewSummary::MissingReviews => Err(anyhow::anyhow!( + "Compliance check failed, found {} commits not reviewed", + summary.not_reviewed + )), + ReportReviewSummary::MissingReviewsWithErrors => Err(anyhow::anyhow!( + "Compliance check failed with {} unreviewed commits and {} other issues", + summary.not_reviewed, + summary.errors + )), + ReportReviewSummary::NoIssuesFound => { + println!("No issues found, compliance check passed."); 
+ Ok(()) + } + } +} + +pub fn check_compliance(args: ComplianceArgs) -> Result<()> { + tokio::runtime::Runtime::new() + .context("Failed to create tokio runtime") + .and_then(|handle| handle.block_on(check_compliance_impl(args))) +} diff --git a/tooling/xtask/src/tasks/workflows.rs b/tooling/xtask/src/tasks/workflows.rs index 414c0b7fd8dc2a99027d8687bcf1d4dbe9c4bb85..387c739a1ac12d4d65d11f33777525c59f05f7f2 100644 --- a/tooling/xtask/src/tasks/workflows.rs +++ b/tooling/xtask/src/tasks/workflows.rs @@ -11,6 +11,7 @@ mod autofix_pr; mod bump_patch_version; mod cherry_pick; mod compare_perf; +mod compliance_check; mod danger; mod deploy_collab; mod extension_auto_bump; @@ -197,6 +198,7 @@ pub fn run_workflows(args: GenerateWorkflowArgs) -> Result<()> { WorkflowFile::zed(bump_patch_version::bump_patch_version), WorkflowFile::zed(cherry_pick::cherry_pick), WorkflowFile::zed(compare_perf::compare_perf), + WorkflowFile::zed(compliance_check::compliance_check), WorkflowFile::zed(danger::danger), WorkflowFile::zed(deploy_collab::deploy_collab), WorkflowFile::zed(extension_bump::extension_bump), diff --git a/tooling/xtask/src/tasks/workflows/autofix_pr.rs b/tooling/xtask/src/tasks/workflows/autofix_pr.rs index 2779dc2b01fa873bc050be4d873b9a5d502606bd..6fa7743275f36eda1746e7afdd4caabc429fec3c 100644 --- a/tooling/xtask/src/tasks/workflows/autofix_pr.rs +++ b/tooling/xtask/src/tasks/workflows/autofix_pr.rs @@ -2,7 +2,7 @@ use gh_workflow::*; use crate::tasks::workflows::{ runners, - steps::{self, FluentBuilder, NamedJob, named}, + steps::{self, FluentBuilder, NamedJob, RepositoryTarget, TokenPermissions, named}, vars::{self, StepOutput, WorkflowInput}, }; @@ -161,7 +161,13 @@ fn commit_changes(pr_number: &WorkflowInput, autofix_job: &NamedJob) -> NamedJob .add_env(("GITHUB_TOKEN", token)) } - let (authenticate, token) = steps::authenticate_as_zippy(); + let (authenticate, token) = steps::authenticate_as_zippy() + .for_repository(RepositoryTarget::current()) + 
.with_permissions([ + (TokenPermissions::Contents, Level::Write), + (TokenPermissions::Workflows, Level::Write), + ]) + .into(); named::job( Job::default() diff --git a/tooling/xtask/src/tasks/workflows/bump_patch_version.rs b/tooling/xtask/src/tasks/workflows/bump_patch_version.rs index 5ef149be29313bc2078dbc1f75a82845c3d3b666..7db348c1d5980c1b21780d9fe0af4e326f6283ca 100644 --- a/tooling/xtask/src/tasks/workflows/bump_patch_version.rs +++ b/tooling/xtask/src/tasks/workflows/bump_patch_version.rs @@ -63,7 +63,7 @@ fn run_bump_patch_version(branch: &WorkflowInput) -> steps::NamedJob { .add_env(("GITHUB_TOKEN", token)) } - let (authenticate, token) = steps::authenticate_as_zippy(); + let (authenticate, token) = steps::authenticate_as_zippy().into(); named::job( Job::default() diff --git a/tooling/xtask/src/tasks/workflows/cherry_pick.rs b/tooling/xtask/src/tasks/workflows/cherry_pick.rs index 5680bf6b23b85c17e68e531cecadfb31f091520d..46fb41094eb9fcea3cf40c4a289217f16855483b 100644 --- a/tooling/xtask/src/tasks/workflows/cherry_pick.rs +++ b/tooling/xtask/src/tasks/workflows/cherry_pick.rs @@ -2,7 +2,7 @@ use gh_workflow::*; use crate::tasks::workflows::{ runners, - steps::{self, NamedJob, named}, + steps::{self, NamedJob, RepositoryTarget, TokenPermissions, named}, vars::{StepOutput, WorkflowInput}, }; @@ -44,7 +44,14 @@ fn run_cherry_pick( .add_env(("GITHUB_TOKEN", token)) } - let (authenticate, token) = steps::authenticate_as_zippy(); + let (authenticate, token) = steps::authenticate_as_zippy() + .for_repository(RepositoryTarget::current()) + .with_permissions([ + (TokenPermissions::Contents, Level::Write), + (TokenPermissions::Workflows, Level::Write), + (TokenPermissions::PullRequests, Level::Write), + ]) + .into(); named::job( Job::default() diff --git a/tooling/xtask/src/tasks/workflows/compliance_check.rs b/tooling/xtask/src/tasks/workflows/compliance_check.rs new file mode 100644 index 
0000000000000000000000000000000000000000..9e2f4ae1e588c545266ec5a8246ac9781c6b668b --- /dev/null +++ b/tooling/xtask/src/tasks/workflows/compliance_check.rs @@ -0,0 +1,66 @@ +use gh_workflow::{Event, Expression, Job, Run, Schedule, Step, Workflow}; + +use crate::tasks::workflows::{ + runners, + steps::{self, CommonJobConditions, named}, + vars::{self, StepOutput}, +}; + +pub fn compliance_check() -> Workflow { + let check = scheduled_compliance_check(); + + named::workflow() + .on(Event::default().schedule([Schedule::new("30 17 * * 2")])) + .add_env(("CARGO_TERM_COLOR", "always")) + .add_job(check.name, check.job) +} + +fn scheduled_compliance_check() -> steps::NamedJob { + let determine_version_step = named::bash(indoc::indoc! {r#" + VERSION=$(sed -n 's/^version = "\(.*\)"/\1/p' crates/zed/Cargo.toml | tr -d '[:space:]') + if [ -z "$VERSION" ]; then + echo "Could not determine version from crates/zed/Cargo.toml" + exit 1 + fi + TAG="v${VERSION}-pre" + echo "Checking compliance for $TAG" + echo "tag=$TAG" >> "$GITHUB_OUTPUT" + "#}) + .id("determine-version"); + + let tag_output = StepOutput::new(&determine_version_step, "tag"); + + fn run_compliance_check(tag: &StepOutput) -> Step { + named::bash( + r#"cargo xtask compliance "$LATEST_TAG" --branch main --report-path target/compliance-report"#, + ) + .id("run-compliance-check") + .add_env(("LATEST_TAG", tag.to_string())) + .add_env(("GITHUB_APP_ID", vars::ZED_ZIPPY_APP_ID)) + .add_env(("GITHUB_APP_KEY", vars::ZED_ZIPPY_APP_PRIVATE_KEY)) + } + + fn send_failure_slack_notification(tag: &StepOutput) -> Step { + named::bash(indoc::indoc! {r#" + MESSAGE="⚠️ Scheduled compliance check failed for upcoming preview release $LATEST_TAG: There are PRs with missing reviews." 
+ + curl -X POST -H 'Content-type: application/json' \ + --data "$(jq -n --arg text "$MESSAGE" '{"text": $text}')" \ + "$SLACK_WEBHOOK" + "#}) + .if_condition(Expression::new("failure()")) + .add_env(("SLACK_WEBHOOK", vars::SLACK_WEBHOOK_WORKFLOW_FAILURES)) + .add_env(("LATEST_TAG", tag.to_string())) + } + + named::job( + Job::default() + .with_repository_owner_guard() + .runs_on(runners::LINUX_SMALL) + .add_step(steps::checkout_repo().with_full_history()) + .add_step(steps::cache_rust_dependencies_namespace()) + .add_step(determine_version_step) + .add_step(run_compliance_check(&tag_output)) + .add_step(send_failure_slack_notification(&tag_output)), + ) +} diff --git a/tooling/xtask/src/tasks/workflows/extension_bump.rs b/tooling/xtask/src/tasks/workflows/extension_bump.rs index a1c2abc169f4348fd04a529c5a5b10b412464c9b..77d2acf7c830302407207950b1919b9002049460 100644 --- a/tooling/xtask/src/tasks/workflows/extension_bump.rs +++ b/tooling/xtask/src/tasks/workflows/extension_bump.rs @@ -359,7 +359,8 @@ fn trigger_release( let extension_registry = RepositoryTarget::new("zed-industries", &["extensions"]); let (generate_token, generated_token) = generate_token(&app_id.to_string(), &app_secret.to_string()) - .for_repository(extension_registry); + .for_repository(extension_registry) + .into(); let (get_extension_id, extension_id) = get_extension_id(); let (release_action, pull_request_number) = release_action(extension_id, tag, &generated_token); diff --git a/tooling/xtask/src/tasks/workflows/extension_workflow_rollout.rs b/tooling/xtask/src/tasks/workflows/extension_workflow_rollout.rs index 3a5d14603f97b43aacb581aaf3b970bac31b701f..1145cf2b5a70c30ac7212f6002e653d1396d55c4 100644 --- a/tooling/xtask/src/tasks/workflows/extension_workflow_rollout.rs +++ b/tooling/xtask/src/tasks/workflows/extension_workflow_rollout.rs @@ -6,6 +6,7 @@ use indoc::indoc; use serde_json::json; use crate::tasks::workflows::steps::CheckoutStep; +use 
crate::tasks::workflows::steps::TokenPermissions; use crate::tasks::workflows::steps::cache_rust_dependencies_namespace; use crate::tasks::workflows::vars::JobOutput; use crate::tasks::workflows::{ @@ -309,13 +310,17 @@ fn rollout_workflows_to_extension( } let (authenticate, token) = - generate_token(vars::ZED_ZIPPY_APP_ID, vars::ZED_ZIPPY_APP_PRIVATE_KEY).for_repository( - RepositoryTarget::new("zed-extensions", &["${{ matrix.repo }}"]).permissions([ - ("permission-pull-requests".to_owned(), Level::Write), - ("permission-contents".to_owned(), Level::Write), - ("permission-workflows".to_owned(), Level::Write), - ]), - ); + generate_token(vars::ZED_ZIPPY_APP_ID, vars::ZED_ZIPPY_APP_PRIVATE_KEY) + .for_repository(RepositoryTarget::new( + "zed-extensions", + &["${{ matrix.repo }}"], + )) + .with_permissions([ + (TokenPermissions::PullRequests, Level::Write), + (TokenPermissions::Contents, Level::Write), + (TokenPermissions::Workflows, Level::Write), + ]) + .into(); let (calculate_short_sha, short_sha) = get_short_sha(); @@ -372,10 +377,10 @@ fn create_rollout_tag(rollout_job: &NamedJob, filter_repos_input: &WorkflowInput } let (authenticate, token) = - generate_token(vars::ZED_ZIPPY_APP_ID, vars::ZED_ZIPPY_APP_PRIVATE_KEY).for_repository( - RepositoryTarget::current() - .permissions([("permission-contents".to_owned(), Level::Write)]), - ); + generate_token(vars::ZED_ZIPPY_APP_ID, vars::ZED_ZIPPY_APP_PRIVATE_KEY) + .for_repository(RepositoryTarget::current()) + .with_permissions([(TokenPermissions::Contents, Level::Write)]) + .into(); let job = Job::default() .needs([rollout_job.name.clone()]) diff --git a/tooling/xtask/src/tasks/workflows/publish_extension_cli.rs b/tooling/xtask/src/tasks/workflows/publish_extension_cli.rs index dad4bce45399bd8d0b4a6ff842f87830bd77484f..9f8d054241507af8597e2ff328263c440377686f 100644 --- a/tooling/xtask/src/tasks/workflows/publish_extension_cli.rs +++ b/tooling/xtask/src/tasks/workflows/publish_extension_cli.rs @@ -119,7 +119,8 @@ fn 
update_sha_in_extensions(publish_job: &NamedJob) -> NamedJob { let extensions_repo = RepositoryTarget::new("zed-industries", &["extensions"]); let (generate_token, generated_token) = generate_token(vars::ZED_ZIPPY_APP_ID, vars::ZED_ZIPPY_APP_PRIVATE_KEY) - .for_repository(extensions_repo); + .for_repository(extensions_repo) + .into(); fn checkout_extensions_repo(token: &StepOutput) -> Step { named::uses( diff --git a/tooling/xtask/src/tasks/workflows/release.rs b/tooling/xtask/src/tasks/workflows/release.rs index 2646005021e052681c0fa16a258a1d0dad725390..3efe3e7c5c127e8580a9ca22d2d0e1ab4e7c80e9 100644 --- a/tooling/xtask/src/tasks/workflows/release.rs +++ b/tooling/xtask/src/tasks/workflows/release.rs @@ -1,11 +1,13 @@ -use gh_workflow::{Event, Expression, Push, Run, Step, Use, Workflow, ctx::Context}; +use gh_workflow::{Event, Expression, Job, Push, Run, Step, Use, Workflow, ctx::Context}; use indoc::formatdoc; use crate::tasks::workflows::{ run_bundling::{bundle_linux, bundle_mac, bundle_windows}, run_tests, runners::{self, Arch, Platform}, - steps::{self, FluentBuilder, NamedJob, dependant_job, named, release_job}, + steps::{ + self, CommonJobConditions, FluentBuilder, NamedJob, dependant_job, named, release_job, + }, vars::{self, StepOutput, assets}, }; @@ -22,6 +24,7 @@ pub(crate) fn release() -> Workflow { let check_scripts = run_tests::check_scripts(); let create_draft_release = create_draft_release(); + let compliance = compliance_check(); let bundle = ReleaseBundleJobs { linux_aarch64: bundle_linux( @@ -92,6 +95,7 @@ pub(crate) fn release() -> Workflow { .add_job(windows_clippy.name, windows_clippy.job) .add_job(check_scripts.name, check_scripts.job) .add_job(create_draft_release.name, create_draft_release.job) + .add_job(compliance.name, compliance.job) .map(|mut workflow| { for job in bundle.into_jobs() { workflow = workflow.add_job(job.name, job.job); @@ -149,6 +153,59 @@ pub(crate) fn create_sentry_release() -> Step { .add_with(("environment", 
"production")) } +fn compliance_check() -> NamedJob { + fn run_compliance_check() -> Step { + named::bash( + r#"cargo xtask compliance "$GITHUB_REF_NAME" --report-path "$COMPLIANCE_FILE_OUTPUT""#, + ) + .id("run-compliance-check") + .add_env(("GITHUB_APP_ID", vars::ZED_ZIPPY_APP_ID)) + .add_env(("GITHUB_APP_KEY", vars::ZED_ZIPPY_APP_PRIVATE_KEY)) + } + + fn send_compliance_slack_notification() -> Step { + named::bash(indoc::indoc! {r#" + if [ "$COMPLIANCE_OUTCOME" == "success" ]; then + STATUS="✅ Compliance check passed for $GITHUB_REF_NAME" + else + STATUS="❌ Compliance check failed for $GITHUB_REF_NAME" + fi + + REPORT_CONTENT="" + if [ -f "$COMPLIANCE_FILE_OUTPUT" ]; then + REPORT_CONTENT=$(cat "$REPORT_FILE") + fi + + MESSAGE=$(printf "%s\n\n%s" "$STATUS" "$REPORT_CONTENT") + + curl -X POST -H 'Content-type: application/json' \ + --data "$(jq -n --arg text "$MESSAGE" '{"text": $text}')" \ + "$SLACK_WEBHOOK" + "#}) + .if_condition(Expression::new("always()")) + .add_env(("SLACK_WEBHOOK", vars::SLACK_WEBHOOK_WORKFLOW_FAILURES)) + .add_env(( + "COMPLIANCE_OUTCOME", + "${{ steps.run-compliance-check.outcome }}", + )) + } + + named::job( + Job::default() + .add_env(("COMPLIANCE_FILE_PATH", "compliance.md")) + .with_repository_owner_guard() + .runs_on(runners::LINUX_DEFAULT) + .add_step( + steps::checkout_repo() + .with_full_history() + .with_ref(Context::github().ref_()), + ) + .add_step(steps::cache_rust_dependencies_namespace()) + .add_step(run_compliance_check()) + .add_step(send_compliance_slack_notification()), + ) +} + fn validate_release_assets(deps: &[&NamedJob]) -> NamedJob { let expected_assets: Vec = assets::all().iter().map(|a| format!("\"{a}\"")).collect(); let expected_assets_json = format!("[{}]", expected_assets.join(", ")); @@ -171,15 +228,59 @@ fn validate_release_assets(deps: &[&NamedJob]) -> NamedJob { "#, }; + fn run_post_upload_compliance_check() -> Step { + named::bash( + r#"cargo xtask compliance "$GITHUB_REF_NAME" --report-path 
target/compliance-report"#, + ) + .id("run-post-upload-compliance-check") + .add_env(("GITHUB_APP_ID", vars::ZED_ZIPPY_APP_ID)) + .add_env(("GITHUB_APP_KEY", vars::ZED_ZIPPY_APP_PRIVATE_KEY)) + } + + fn send_post_upload_compliance_notification() -> Step { + named::bash(indoc::indoc! {r#" + if [ -z "$COMPLIANCE_OUTCOME" ] || [ "$COMPLIANCE_OUTCOME" == "skipped" ]; then + echo "Compliance check was skipped, not sending notification" + exit 0 + fi + + TAG="$GITHUB_REF_NAME" + + if [ "$COMPLIANCE_OUTCOME" == "success" ]; then + MESSAGE="✅ Post-upload compliance re-check passed for $TAG" + else + MESSAGE="❌ Post-upload compliance re-check failed for $TAG" + fi + + curl -X POST -H 'Content-type: application/json' \ + --data "$(jq -n --arg text "$MESSAGE" '{"text": $text}')" \ + "$SLACK_WEBHOOK" + "#}) + .if_condition(Expression::new("always()")) + .add_env(("SLACK_WEBHOOK", vars::SLACK_WEBHOOK_WORKFLOW_FAILURES)) + .add_env(( + "COMPLIANCE_OUTCOME", + "${{ steps.run-post-upload-compliance-check.outcome }}", + )) + } + named::job( - dependant_job(deps).runs_on(runners::LINUX_SMALL).add_step( - named::bash(&validation_script).add_env(("GITHUB_TOKEN", vars::GITHUB_TOKEN)), - ), + dependant_job(deps) + .runs_on(runners::LINUX_SMALL) + .add_step(named::bash(&validation_script).add_env(("GITHUB_TOKEN", vars::GITHUB_TOKEN))) + .add_step( + steps::checkout_repo() + .with_full_history() + .with_ref(Context::github().ref_()), + ) + .add_step(steps::cache_rust_dependencies_namespace()) + .add_step(run_post_upload_compliance_check()) + .add_step(send_post_upload_compliance_notification()), ) } fn auto_release_preview(deps: &[&NamedJob]) -> NamedJob { - let (authenticate, token) = steps::authenticate_as_zippy(); + let (authenticate, token) = steps::authenticate_as_zippy().into(); named::job( dependant_job(deps) @@ -255,7 +356,7 @@ fn create_draft_release() -> NamedJob { .add_step( steps::checkout_repo() .with_custom_fetch_depth(25) - .with_ref("${{ github.ref }}"), + 
.with_ref(Context::github().ref_()), ) .add_step(steps::script("script/determine-release-channel")) .add_step(steps::script("mkdir -p target/")) diff --git a/tooling/xtask/src/tasks/workflows/steps.rs b/tooling/xtask/src/tasks/workflows/steps.rs index 435c896e873235134e47f1a058d8b54b1110bbd9..15c2614ada81dd7c2e772f52c7072dac4324d1dc 100644 --- a/tooling/xtask/src/tasks/workflows/steps.rs +++ b/tooling/xtask/src/tasks/workflows/steps.rs @@ -517,20 +517,50 @@ pub fn git_checkout(ref_name: &dyn std::fmt::Display) -> Step { .add_env(("REF_NAME", ref_name.to_string())) } +/// Non-exhaustive list of the permissions to be set for a GitHub app token. +/// +/// See https://github.com/actions/create-github-app-token?tab=readme-ov-file#permission-permission-name +/// and beyond for a full list of available permissions. +#[allow(unused)] +pub(crate) enum TokenPermissions { + Contents, + Issues, + PullRequests, + Workflows, +} + +impl TokenPermissions { + pub fn environment_name(&self) -> &'static str { + match self { + TokenPermissions::Contents => "permission-contents", + TokenPermissions::Issues => "permission-issues", + TokenPermissions::PullRequests => "permission-pull-requests", + TokenPermissions::Workflows => "permission-workflows", + } + } +} + pub(crate) struct GenerateAppToken<'a> { job_name: String, app_id: &'a str, app_secret: &'a str, repository_target: Option, + permissions: Option>, } impl<'a> GenerateAppToken<'a> { - pub fn for_repository(self, repository_target: RepositoryTarget) -> (Step, StepOutput) { + pub fn for_repository(self, repository_target: RepositoryTarget) -> Self { Self { repository_target: Some(repository_target), ..self } - .into() + } + + pub fn with_permissions(self, permissions: impl Into>) -> Self { + Self { + permissions: Some(permissions.into()), + ..self + } } } @@ -553,26 +583,24 @@ impl<'a> From> for (Step, StepOutput) { RepositoryTarget { owner, repositories, - permissions, }| { input .when_some(owner, |input, owner| 
input.add("owner", owner)) .when_some(repositories, |input, repositories| { input.add("repositories", repositories) }) - .when_some(permissions, |input, permissions| { - permissions.into_iter().fold( - input, - |input, (permission, level)| { - input.add( - permission, - serde_json::to_value(&level).unwrap_or_default(), - ) - }, - ) - }) }, - ), + ) + .when_some(token.permissions, |input, permissions| { + permissions + .into_iter() + .fold(input, |input, (permission, level)| { + input.add( + permission.environment_name(), + serde_json::to_value(&level).unwrap_or_default(), + ) + }) + }), ); let generated_token = StepOutput::new(&step, "token"); @@ -583,7 +611,6 @@ impl<'a> From> for (Step, StepOutput) { pub(crate) struct RepositoryTarget { owner: Option, repositories: Option, - permissions: Option>, } impl RepositoryTarget { @@ -591,7 +618,6 @@ impl RepositoryTarget { Self { owner: Some(owner.to_string()), repositories: Some(repositories.join("\n")), - permissions: None, } } @@ -599,14 +625,6 @@ impl RepositoryTarget { Self { owner: None, repositories: None, - permissions: None, - } - } - - pub fn permissions(self, permissions: impl Into>) -> Self { - Self { - permissions: Some(permissions.into()), - ..self } } } @@ -618,8 +636,8 @@ pub(crate) fn generate_token<'a>( generate_token_with_job_name(app_id_source, app_secret_source) } -pub fn authenticate_as_zippy() -> (Step, StepOutput) { - generate_token_with_job_name(vars::ZED_ZIPPY_APP_ID, vars::ZED_ZIPPY_APP_PRIVATE_KEY).into() +pub fn authenticate_as_zippy() -> GenerateAppToken<'static> { + generate_token_with_job_name(vars::ZED_ZIPPY_APP_ID, vars::ZED_ZIPPY_APP_PRIVATE_KEY) } fn generate_token_with_job_name<'a>( @@ -631,5 +649,6 @@ fn generate_token_with_job_name<'a>( app_id: app_id_source, app_secret: app_secret_source, repository_target: None, + permissions: None, } }